# RUN: llc -mtriple=x86_64-- -run-pass x86-evex-to-vex-compress -verify-machineinstrs -mcpu=skx -o - %s | FileCheck %s
# This test verifies the VEX encoding of AVX-512 instructions that use low-index registers,
# do not use zmm or mask registers, and have a corresponding AVX/AVX2 opcode.
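# For example, the compression pass is expected to rewrite an EVEX-encoded 256-bit move such as
#   $ymm0 = VMOVAPDZ256rr $ymm0
# into its VEX-encoded AVX form
#   $ymm0 = VMOVAPDYrr $ymm0
# when only the low 16 xmm/ymm registers are involved (see the cases checked below).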
--- |
define void @evex_z256_to_vex_test() { ret void }
define void @evex_z128_to_vex_test() { ret void }
define void @evex_scalar_to_vex_test() { ret void }
define void @evex_z256_to_evex_test() { ret void }
define void @evex_z128_to_evex_test() { ret void }
define void @evex_scalar_to_evex_test() { ret void }
...
---
# CHECK-LABEL: name: evex_z256_to_vex_test
# CHECK: bb.0:
name: evex_z256_to_vex_test
body: |
bb.0:
; CHECK: VMOVAPDYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
VMOVAPDZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
; CHECK: $ymm0 = VMOVAPDYrm $rip, 1, $noreg, $rax, $noreg
$ymm0 = VMOVAPDZ256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm0 = VMOVAPDYrr $ymm0
$ymm0 = VMOVAPDZ256rr $ymm0
; CHECK: $ymm0 = VMOVAPDYrr_REV $ymm0
$ymm0 = VMOVAPDZ256rr_REV $ymm0
; CHECK: VMOVAPSYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
VMOVAPSZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
; CHECK: $ymm0 = VMOVAPSYrm $rip, 1, $noreg, $rax, $noreg
$ymm0 = VMOVAPSZ256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm0 = VMOVAPSYrr $ymm0
$ymm0 = VMOVAPSZ256rr $ymm0
; CHECK: $ymm0 = VMOVAPSYrr_REV $ymm0
$ymm0 = VMOVAPSZ256rr_REV $ymm0
; CHECK: $ymm0 = VMOVDDUPYrm $rip, 1, $noreg, $rax, $noreg
$ymm0 = VMOVDDUPZ256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm0 = VMOVDDUPYrr $ymm0
$ymm0 = VMOVDDUPZ256rr $ymm0
; CHECK: VMOVDQAYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
VMOVDQA32Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
; CHECK: $ymm0 = VMOVDQAYrm $rip, 1, $noreg, $rax, $noreg
$ymm0 = VMOVDQA32Z256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm0 = VMOVDQAYrr $ymm0
$ymm0 = VMOVDQA32Z256rr $ymm0
; CHECK: $ymm0 = VMOVDQAYrr_REV $ymm0
$ymm0 = VMOVDQA32Z256rr_REV $ymm0
; CHECK: VMOVDQAYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
VMOVDQA64Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
; CHECK: $ymm0 = VMOVDQAYrm $rip, 1, $noreg, $rax, $noreg
$ymm0 = VMOVDQA64Z256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm0 = VMOVDQAYrr $ymm0
$ymm0 = VMOVDQA64Z256rr $ymm0
; CHECK: $ymm0 = VMOVDQAYrr_REV $ymm0
$ymm0 = VMOVDQA64Z256rr_REV $ymm0
; CHECK: VMOVDQUYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
VMOVDQU16Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
; CHECK: $ymm0 = VMOVDQUYrm $rip, 1, $noreg, $rax, $noreg
$ymm0 = VMOVDQU16Z256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm0 = VMOVDQUYrr $ymm0
$ymm0 = VMOVDQU16Z256rr $ymm0
; CHECK: $ymm0 = VMOVDQUYrr_REV $ymm0
$ymm0 = VMOVDQU16Z256rr_REV $ymm0
; CHECK: VMOVDQUYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
VMOVDQU32Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
; CHECK: $ymm0 = VMOVDQUYrm $rip, 1, $noreg, $rax, $noreg
$ymm0 = VMOVDQU32Z256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm0 = VMOVDQUYrr $ymm0
$ymm0 = VMOVDQU32Z256rr $ymm0
; CHECK: $ymm0 = VMOVDQUYrr_REV $ymm0
$ymm0 = VMOVDQU32Z256rr_REV $ymm0
; CHECK: VMOVDQUYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
VMOVDQU64Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
; CHECK: $ymm0 = VMOVDQUYrm $rip, 1, $noreg, $rax, $noreg
$ymm0 = VMOVDQU64Z256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm0 = VMOVDQUYrr $ymm0
$ymm0 = VMOVDQU64Z256rr $ymm0
; CHECK: $ymm0 = VMOVDQUYrr_REV $ymm0
$ymm0 = VMOVDQU64Z256rr_REV $ymm0
; CHECK: VMOVDQUYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
VMOVDQU8Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
; CHECK: $ymm0 = VMOVDQUYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMOVDQU8Z256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMOVDQUYrr $ymm0
|
|
$ymm0 = VMOVDQU8Z256rr $ymm0
|
|
; CHECK: $ymm0 = VMOVDQUYrr_REV $ymm0
|
|
$ymm0 = VMOVDQU8Z256rr_REV $ymm0
|
|
; CHECK: $ymm0 = VMOVNTDQAYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMOVNTDQAZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: VMOVNTDQYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
|
|
VMOVNTDQZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
|
|
; CHECK: VMOVNTPDYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
|
|
VMOVNTPDZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
|
|
; CHECK: VMOVNTPSYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
|
|
VMOVNTPSZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
|
|
; CHECK: $ymm0 = VMOVSHDUPYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMOVSHDUPZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMOVSHDUPYrr $ymm0
|
|
$ymm0 = VMOVSHDUPZ256rr $ymm0
|
|
; CHECK: $ymm0 = VMOVSLDUPYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMOVSLDUPZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMOVSLDUPYrr $ymm0
|
|
$ymm0 = VMOVSLDUPZ256rr $ymm0
|
|
; CHECK: VMOVUPDYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
|
|
VMOVUPDZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
|
|
; CHECK: $ymm0 = VMOVUPDYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMOVUPDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMOVUPDYrr $ymm0
|
|
$ymm0 = VMOVUPDZ256rr $ymm0
|
|
; CHECK: $ymm0 = VMOVUPDYrr_REV $ymm0
|
|
$ymm0 = VMOVUPDZ256rr_REV $ymm0
|
|
; CHECK: VMOVUPSYmr $rdi, 1, $noreg, 0, $noreg, $ymm0
|
|
VMOVUPSZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm0
|
|
; CHECK: $ymm0 = VPANDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPANDDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPANDYrr $ymm0, $ymm1
|
|
$ymm0 = VPANDDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPANDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPANDQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPANDYrr $ymm0, $ymm1
|
|
$ymm0 = VPANDQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPANDNYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPANDNDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPANDNYrr $ymm0, $ymm1
|
|
$ymm0 = VPANDNDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPANDNYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPANDNQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPANDNYrr $ymm0, $ymm1
|
|
$ymm0 = VPANDNQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPAVGBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPAVGBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPAVGBYrr $ymm0, $ymm1
|
|
$ymm0 = VPAVGBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPAVGWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPAVGWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPAVGWYrr $ymm0, $ymm1
|
|
$ymm0 = VPAVGWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPADDBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPADDBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPADDBYrr $ymm0, $ymm1
|
|
$ymm0 = VPADDBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPADDDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPADDDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPADDDYrr $ymm0, $ymm1
|
|
$ymm0 = VPADDDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPADDQYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPADDQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPADDQYrr $ymm0, $ymm1
|
|
$ymm0 = VPADDQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPADDSBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPADDSBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPADDSBYrr $ymm0, $ymm1
|
|
$ymm0 = VPADDSBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPADDSWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPADDSWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPADDSWYrr $ymm0, $ymm1
|
|
$ymm0 = VPADDSWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPADDUSBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPADDUSBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPADDUSBYrr $ymm0, $ymm1
|
|
$ymm0 = VPADDUSBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPADDUSWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPADDUSWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPADDUSWYrr $ymm0, $ymm1
|
|
$ymm0 = VPADDUSWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPADDWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPADDWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPADDWYrr $ymm0, $ymm1
|
|
$ymm0 = VPADDWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VMULPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMULPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMULPDYrr $ymm0, $ymm1
|
|
$ymm0 = VMULPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VMULPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMULPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMULPSYrr $ymm0, $ymm1
|
|
$ymm0 = VMULPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VORPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VORPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VORPDYrr $ymm0, $ymm1
|
|
$ymm0 = VORPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VORPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VORPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VORPSYrr $ymm0, $ymm1
|
|
$ymm0 = VORPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMADDUBSWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMADDUBSWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMADDUBSWYrr $ymm0, $ymm1
|
|
$ymm0 = VPMADDUBSWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMADDWDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMADDWDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMADDWDYrr $ymm0, $ymm1
|
|
$ymm0 = VPMADDWDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMAXSBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMAXSBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMAXSBYrr $ymm0, $ymm1
|
|
$ymm0 = VPMAXSBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMAXSDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMAXSDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMAXSDYrr $ymm0, $ymm1
|
|
$ymm0 = VPMAXSDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMAXSWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMAXSWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMAXSWYrr $ymm0, $ymm1
|
|
$ymm0 = VPMAXSWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMAXUBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMAXUBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMAXUBYrr $ymm0, $ymm1
|
|
$ymm0 = VPMAXUBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMAXUDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMAXUDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMAXUDYrr $ymm0, $ymm1
|
|
$ymm0 = VPMAXUDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMAXUWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMAXUWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMAXUWYrr $ymm0, $ymm1
|
|
$ymm0 = VPMAXUWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMINSBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMINSBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMINSBYrr $ymm0, $ymm1
|
|
$ymm0 = VPMINSBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMINSDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMINSDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMINSDYrr $ymm0, $ymm1
|
|
$ymm0 = VPMINSDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMINSWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMINSWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMINSWYrr $ymm0, $ymm1
|
|
$ymm0 = VPMINSWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMINUBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMINUBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMINUBYrr $ymm0, $ymm1
|
|
$ymm0 = VPMINUBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMINUDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMINUDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMINUDYrr $ymm0, $ymm1
|
|
$ymm0 = VPMINUDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMINUWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMINUWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMINUWYrr $ymm0, $ymm1
|
|
$ymm0 = VPMINUWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMULDQYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMULDQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMULDQYrr $ymm0, $ymm1
|
|
$ymm0 = VPMULDQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMULHRSWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMULHRSWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMULHRSWYrr $ymm0, $ymm1
|
|
$ymm0 = VPMULHRSWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMULHUWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMULHUWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMULHUWYrr $ymm0, $ymm1
|
|
$ymm0 = VPMULHUWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMULHWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMULHWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMULHWYrr $ymm0, $ymm1
|
|
$ymm0 = VPMULHWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMULLDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMULLDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMULLDYrr $ymm0, $ymm1
|
|
$ymm0 = VPMULLDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMULLWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMULLWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMULLWYrr $ymm0, $ymm1
|
|
$ymm0 = VPMULLWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPMULUDQYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMULUDQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMULUDQYrr $ymm0, $ymm1
|
|
$ymm0 = VPMULUDQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPORYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPORDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPORYrr $ymm0, $ymm1
|
|
$ymm0 = VPORDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPORYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPORQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPORYrr $ymm0, $ymm1
|
|
$ymm0 = VPORQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSUBBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSUBBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSUBBYrr $ymm0, $ymm1
|
|
$ymm0 = VPSUBBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSUBDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSUBDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSUBDYrr $ymm0, $ymm1
|
|
$ymm0 = VPSUBDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSUBQYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSUBQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSUBQYrr $ymm0, $ymm1
|
|
$ymm0 = VPSUBQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSUBSBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSUBSBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSUBSBYrr $ymm0, $ymm1
|
|
$ymm0 = VPSUBSBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSUBSWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSUBSWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSUBSWYrr $ymm0, $ymm1
|
|
$ymm0 = VPSUBSWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSUBUSBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSUBUSBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSUBUSBYrr $ymm0, $ymm1
|
|
$ymm0 = VPSUBUSBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSUBUSWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSUBUSWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSUBUSWYrr $ymm0, $ymm1
|
|
$ymm0 = VPSUBUSWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSUBWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSUBWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSUBWYrr $ymm0, $ymm1
|
|
$ymm0 = VPSUBWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPXORYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPXORDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPXORYrr $ymm0, $ymm1
|
|
$ymm0 = VPXORDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPXORYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPXORQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPXORYrr $ymm0, $ymm1
|
|
$ymm0 = VPXORQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VADDPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VADDPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VADDPDYrr $ymm0, $ymm1
|
|
$ymm0 = VADDPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VADDPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VADDPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VADDPSYrr $ymm0, $ymm1
|
|
$ymm0 = VADDPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VANDNPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VANDNPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VANDNPDYrr $ymm0, $ymm1
|
|
$ymm0 = VANDNPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VANDNPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VANDNPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VANDNPSYrr $ymm0, $ymm1
|
|
$ymm0 = VANDNPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VANDPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VANDPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VANDPDYrr $ymm0, $ymm1
|
|
$ymm0 = VANDPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VANDPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VANDPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VANDPSYrr $ymm0, $ymm1
|
|
$ymm0 = VANDPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VDIVPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VDIVPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VDIVPDYrr $ymm0, $ymm1
|
|
$ymm0 = VDIVPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VDIVPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VDIVPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VDIVPSYrr $ymm0, $ymm1
|
|
$ymm0 = VDIVPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VMAXCPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMAXCPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMAXCPDYrr $ymm0, $ymm1
|
|
$ymm0 = VMAXCPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VMAXCPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMAXCPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMAXCPSYrr $ymm0, $ymm1
|
|
$ymm0 = VMAXCPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VMAXCPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMAXPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMAXCPDYrr $ymm0, $ymm1
|
|
$ymm0 = VMAXPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VMAXCPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMAXPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMAXCPSYrr $ymm0, $ymm1
|
|
$ymm0 = VMAXPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VMINCPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMINCPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMINCPDYrr $ymm0, $ymm1
|
|
$ymm0 = VMINCPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VMINCPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMINCPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMINCPSYrr $ymm0, $ymm1
|
|
$ymm0 = VMINCPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VMINCPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMINPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMINCPDYrr $ymm0, $ymm1
|
|
$ymm0 = VMINPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VMINCPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VMINPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VMINCPSYrr $ymm0, $ymm1
|
|
$ymm0 = VMINPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VXORPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VXORPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VXORPDYrr $ymm0, $ymm1
|
|
$ymm0 = VXORPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VXORPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VXORPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VXORPSYrr $ymm0, $ymm1
|
|
$ymm0 = VXORPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPACKSSDWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPACKSSDWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPACKSSDWYrr $ymm0, $ymm1
|
|
$ymm0 = VPACKSSDWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPACKSSWBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPACKSSWBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPACKSSWBYrr $ymm0, $ymm1
|
|
$ymm0 = VPACKSSWBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPACKUSDWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPACKUSDWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPACKUSDWYrr $ymm0, $ymm1
|
|
$ymm0 = VPACKUSDWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPACKUSWBYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPACKUSWBZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPACKUSWBYrr $ymm0, $ymm1
|
|
$ymm0 = VPACKUSWBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VUNPCKHPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VUNPCKHPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VUNPCKHPDYrr $ymm0, $ymm1
|
|
$ymm0 = VUNPCKHPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VUNPCKHPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VUNPCKHPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VUNPCKHPSYrr $ymm0, $ymm1
|
|
$ymm0 = VUNPCKHPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VUNPCKLPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VUNPCKLPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VUNPCKLPDYrr $ymm0, $ymm1
|
|
$ymm0 = VUNPCKLPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VUNPCKLPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VUNPCKLPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VUNPCKLPSYrr $ymm0, $ymm1
|
|
$ymm0 = VUNPCKLPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VSUBPDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VSUBPDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VSUBPDYrr $ymm0, $ymm1
|
|
$ymm0 = VSUBPDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VSUBPSYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VSUBPSZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VSUBPSYrr $ymm0, $ymm1
|
|
$ymm0 = VSUBPSZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPUNPCKHBWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPUNPCKHBWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPUNPCKHBWYrr $ymm0, $ymm1
|
|
$ymm0 = VPUNPCKHBWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPUNPCKHDQYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPUNPCKHDQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPUNPCKHDQYrr $ymm0, $ymm1
|
|
$ymm0 = VPUNPCKHDQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPUNPCKHQDQYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPUNPCKHQDQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPUNPCKHQDQYrr $ymm0, $ymm1
|
|
$ymm0 = VPUNPCKHQDQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPUNPCKHWDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPUNPCKHWDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPUNPCKHWDYrr $ymm0, $ymm1
|
|
$ymm0 = VPUNPCKHWDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPUNPCKLBWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPUNPCKLBWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPUNPCKLBWYrr $ymm0, $ymm1
|
|
$ymm0 = VPUNPCKLBWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPUNPCKLDQYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPUNPCKLDQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPUNPCKLDQYrr $ymm0, $ymm1
|
|
$ymm0 = VPUNPCKLDQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPUNPCKLQDQYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPUNPCKLQDQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPUNPCKLQDQYrr $ymm0, $ymm1
|
|
$ymm0 = VPUNPCKLQDQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPUNPCKLWDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPUNPCKLWDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPUNPCKLWDYrr $ymm0, $ymm1
|
|
$ymm0 = VPUNPCKLWDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VFMADD132PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMADD132PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMADD132PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMADD132PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMADD132PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMADD132PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMADD132PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMADD132PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMADD213PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMADD213PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMADD213PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMADD213PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMADD213PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMADD213PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMADD213PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMADD213PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMADD231PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMADD231PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMADD231PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMADD231PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMADD231PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMADD231PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMADD231PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMADD231PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMADDSUB132PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMADDSUB132PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMADDSUB132PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMADDSUB132PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMADDSUB132PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMADDSUB132PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMADDSUB132PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMADDSUB132PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMADDSUB213PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMADDSUB213PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMADDSUB213PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMADDSUB213PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMADDSUB213PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMADDSUB213PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMADDSUB213PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMADDSUB213PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMADDSUB231PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMADDSUB231PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMADDSUB231PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMADDSUB231PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMADDSUB231PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMADDSUB231PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMADDSUB231PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMADDSUB231PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMSUB132PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMSUB132PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMSUB132PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMSUB132PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMSUB132PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMSUB132PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMSUB132PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMSUB132PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMSUB213PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMSUB213PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMSUB213PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMSUB213PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMSUB213PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMSUB213PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMSUB213PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMSUB213PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMSUB231PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMSUB231PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMSUB231PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMSUB231PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMSUB231PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMSUB231PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMSUB231PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMSUB231PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMSUBADD132PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMSUBADD132PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMSUBADD132PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMSUBADD132PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMSUBADD132PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMSUBADD132PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMSUBADD132PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMSUBADD132PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMSUBADD213PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMSUBADD213PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMSUBADD213PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMSUBADD213PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMSUBADD213PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMSUBADD213PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMSUBADD213PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMSUBADD213PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMSUBADD231PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMSUBADD231PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMSUBADD231PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMSUBADD231PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFMSUBADD231PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFMSUBADD231PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFMSUBADD231PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFMSUBADD231PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFNMADD132PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFNMADD132PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFNMADD132PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFNMADD132PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFNMADD132PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFNMADD132PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFNMADD132PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFNMADD132PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFNMADD213PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFNMADD213PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFNMADD213PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFNMADD213PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFNMADD213PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFNMADD213PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFNMADD213PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFNMADD213PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFNMADD231PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFNMADD231PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFNMADD231PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFNMADD231PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFNMADD231PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFNMADD231PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFNMADD231PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFNMADD231PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFNMSUB132PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFNMSUB132PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFNMSUB132PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFNMSUB132PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFNMSUB132PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFNMSUB132PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFNMSUB132PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFNMSUB132PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFNMSUB213PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFNMSUB213PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFNMSUB213PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFNMSUB213PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFNMSUB213PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFNMSUB213PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFNMSUB213PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFNMSUB213PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFNMSUB231PDYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFNMSUB231PDZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFNMSUB231PDYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFNMSUB231PDZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VFNMSUB231PSYm $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VFNMSUB231PSZ256m $ymm0, $ymm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VFNMSUB231PSYr $ymm0, $ymm1, $ymm2
|
|
$ymm0 = VFNMSUB231PSZ256r $ymm0, $ymm1, $ymm2
|
|
; CHECK: $ymm0 = VPSRADYri $ymm0, 7
|
|
$ymm0 = VPSRADZ256ri $ymm0, 7
|
|
; CHECK: $ymm0 = VPSRADYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSRADZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSRADYrr $ymm0, $xmm1
|
|
$ymm0 = VPSRADZ256rr $ymm0, $xmm1
|
|
; CHECK: $ymm0 = VPSRAVDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSRAVDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSRAVDYrr $ymm0, $ymm1
|
|
$ymm0 = VPSRAVDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSRAWYri $ymm0, 7
|
|
$ymm0 = VPSRAWZ256ri $ymm0, 7
|
|
; CHECK: $ymm0 = VPSRAWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSRAWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSRAWYrr $ymm0, $xmm1
|
|
$ymm0 = VPSRAWZ256rr $ymm0, $xmm1
|
|
; CHECK: $ymm0 = VPSRLDQYri $ymm0, $ymm1
|
|
$ymm0 = VPSRLDQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSRLDYri $ymm0, 7
|
|
$ymm0 = VPSRLDZ256ri $ymm0, 7
|
|
; CHECK: $ymm0 = VPSRLDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSRLDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSRLDYrr $ymm0, $xmm1
|
|
$ymm0 = VPSRLDZ256rr $ymm0, $xmm1
|
|
; CHECK: $ymm0 = VPSRLQYri $ymm0, 7
|
|
$ymm0 = VPSRLQZ256ri $ymm0, 7
|
|
; CHECK: $ymm0 = VPSRLQYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSRLQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSRLQYrr $ymm0, $xmm1
|
|
$ymm0 = VPSRLQZ256rr $ymm0, $xmm1
|
|
; CHECK: $ymm0 = VPSRLVDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSRLVDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSRLVDYrr $ymm0, $ymm1
|
|
$ymm0 = VPSRLVDZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSRLVQYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSRLVQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSRLVQYrr $ymm0, $ymm1
|
|
$ymm0 = VPSRLVQZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSRLWYri $ymm0, 7
|
|
$ymm0 = VPSRLWZ256ri $ymm0, 7
|
|
; CHECK: $ymm0 = VPSRLWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSRLWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSRLWYrr $ymm0, $xmm1
|
|
$ymm0 = VPSRLWZ256rr $ymm0, $xmm1
|
|
; CHECK: $ymm0 = VPMOVSXBDYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMOVSXBDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMOVSXBDYrr $xmm0
|
|
$ymm0 = VPMOVSXBDZ256rr $xmm0
|
|
; CHECK: $ymm0 = VPMOVSXBQYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMOVSXBQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMOVSXBQYrr $xmm0
|
|
$ymm0 = VPMOVSXBQZ256rr $xmm0
|
|
; CHECK: $ymm0 = VPMOVSXBWYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMOVSXBWZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMOVSXBWYrr $xmm0
|
|
$ymm0 = VPMOVSXBWZ256rr $xmm0
|
|
; CHECK: $ymm0 = VPMOVSXDQYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMOVSXDQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMOVSXDQYrr $xmm0
|
|
$ymm0 = VPMOVSXDQZ256rr $xmm0
|
|
; CHECK: $ymm0 = VPMOVSXWDYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMOVSXWDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMOVSXWDYrr $xmm0
|
|
$ymm0 = VPMOVSXWDZ256rr $xmm0
|
|
; CHECK: $ymm0 = VPMOVSXWQYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMOVSXWQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMOVSXWQYrr $xmm0
|
|
$ymm0 = VPMOVSXWQZ256rr $xmm0
|
|
; CHECK: $ymm0 = VPMOVZXBDYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMOVZXBDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMOVZXBDYrr $xmm0
|
|
$ymm0 = VPMOVZXBDZ256rr $xmm0
|
|
; CHECK: $ymm0 = VPMOVZXBQYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMOVZXBQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMOVZXBQYrr $xmm0
|
|
$ymm0 = VPMOVZXBQZ256rr $xmm0
|
|
; CHECK: $ymm0 = VPMOVZXBWYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMOVZXBWZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMOVZXBWYrr $xmm0
|
|
$ymm0 = VPMOVZXBWZ256rr $xmm0
|
|
; CHECK: $ymm0 = VPMOVZXDQYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMOVZXDQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMOVZXDQYrr $xmm0
|
|
$ymm0 = VPMOVZXDQZ256rr $xmm0
|
|
; CHECK: $ymm0 = VPMOVZXWDYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMOVZXWDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMOVZXWDYrr $xmm0
|
|
$ymm0 = VPMOVZXWDZ256rr $xmm0
|
|
; CHECK: $ymm0 = VPMOVZXWQYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPMOVZXWQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPMOVZXWQYrr $xmm0
|
|
$ymm0 = VPMOVZXWQZ256rr $xmm0
|
|
; CHECK: $ymm0 = VBROADCASTF128 $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VBROADCASTF32X4Z256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VBROADCASTSDYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VBROADCASTF32X2Z256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VBROADCASTSDYrr $xmm0
|
|
$ymm0 = VBROADCASTF32X2Z256r $xmm0
|
|
; CHECK: $ymm0 = VBROADCASTSDYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VBROADCASTSDZ256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VBROADCASTSDYrr $xmm0
|
|
$ymm0 = VBROADCASTSDZ256r $xmm0
|
|
; CHECK: $ymm0 = VBROADCASTSSYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VBROADCASTSSZ256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VBROADCASTSSYrr $xmm0
|
|
$ymm0 = VBROADCASTSSZ256r $xmm0
|
|
; CHECK: $ymm0 = VPBROADCASTBYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPBROADCASTBZ256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPBROADCASTBYrr $xmm0
|
|
$ymm0 = VPBROADCASTBZ256r $xmm0
|
|
; CHECK: $ymm0 = VPBROADCASTDYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPBROADCASTDZ256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPBROADCASTDYrr $xmm0
|
|
$ymm0 = VPBROADCASTDZ256r $xmm0
|
|
; CHECK: $ymm0 = VPBROADCASTWYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPBROADCASTWZ256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPBROADCASTWYrr $xmm0
|
|
$ymm0 = VPBROADCASTWZ256r $xmm0
|
|
; CHECK: $ymm0 = VBROADCASTI128 $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VBROADCASTI32X4Z256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPBROADCASTQYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VBROADCASTI32X2Z256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPBROADCASTQYrr $xmm0
|
|
$ymm0 = VBROADCASTI32X2Z256r $xmm0
|
|
; CHECK: $ymm0 = VPBROADCASTQYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPBROADCASTQZ256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPBROADCASTQYrr $xmm0
|
|
$ymm0 = VPBROADCASTQZ256r $xmm0
|
|
; CHECK: $ymm0 = VPABSBYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPABSBZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPABSBYrr $ymm0
|
|
$ymm0 = VPABSBZ256rr $ymm0
|
|
; CHECK: $ymm0 = VPABSDYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPABSDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPABSDYrr $ymm0
|
|
$ymm0 = VPABSDZ256rr $ymm0
|
|
; CHECK: $ymm0 = VPABSWYrm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPABSWZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPABSWYrr $ymm0
|
|
$ymm0 = VPABSWZ256rr $ymm0
|
|
; CHECK: $ymm0 = VPSADBWYrm $ymm0, 1, $noreg, $rax, $noreg, $noreg
|
|
$ymm0 = VPSADBWZ256rm $ymm0, 1, $noreg, $rax, $noreg, $noreg
|
|
; CHECK: $ymm0 = VPSADBWYrr $ymm0, $ymm1
|
|
$ymm0 = VPSADBWZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPERMDYrm $ymm0, $rdi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VPERMDZ256rm $ymm0, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VPERMDYrr $ymm1, $ymm0
|
|
$ymm0 = VPERMDZ256rr $ymm1, $ymm0
|
|
; CHECK: $ymm0 = VPERMILPDYmi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm0 = VPERMILPDZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm0 = VPERMILPDYri $ymm0, 7
|
|
$ymm0 = VPERMILPDZ256ri $ymm0, 7
|
|
; CHECK: $ymm0 = VPERMILPDYrm $ymm0, $rdi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VPERMILPDZ256rm $ymm0, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VPERMILPDYrr $ymm1, $ymm0
|
|
$ymm0 = VPERMILPDZ256rr $ymm1, $ymm0
|
|
; CHECK: $ymm0 = VPERMILPSYmi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm0 = VPERMILPSZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm0 = VPERMILPSYri $ymm0, 7
|
|
$ymm0 = VPERMILPSZ256ri $ymm0, 7
|
|
; CHECK: $ymm0 = VPERMILPSYrm $ymm0, $rdi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VPERMILPSZ256rm $ymm0, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VPERMILPSYrr $ymm1, $ymm0
|
|
$ymm0 = VPERMILPSZ256rr $ymm1, $ymm0
|
|
; CHECK: $ymm0 = VPERMPDYmi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm0 = VPERMPDZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm0 = VPERMPDYri $ymm0, 7
|
|
$ymm0 = VPERMPDZ256ri $ymm0, 7
|
|
; CHECK: $ymm0 = VPERMPSYrm $ymm0, $rdi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VPERMPSZ256rm $ymm0, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VPERMPSYrr $ymm1, $ymm0
|
|
$ymm0 = VPERMPSZ256rr $ymm1, $ymm0
|
|
; CHECK: $ymm0 = VPERMQYmi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm0 = VPERMQZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm0 = VPERMQYri $ymm0, 7
|
|
$ymm0 = VPERMQZ256ri $ymm0, 7
|
|
; CHECK: $ymm0 = VPSLLDQYri $ymm0, 14
|
|
$ymm0 = VPSLLDQZ256rr $ymm0, 14
|
|
; CHECK: $ymm0 = VPSLLDYri $ymm0, 7
|
|
$ymm0 = VPSLLDZ256ri $ymm0, 7
|
|
; CHECK: $ymm0 = VPSLLDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSLLDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSLLDYrr $ymm0, 14
|
|
$ymm0 = VPSLLDZ256rr $ymm0, 14
|
|
; CHECK: $ymm0 = VPSLLQYri $ymm0, 7
|
|
$ymm0 = VPSLLQZ256ri $ymm0, 7
|
|
; CHECK: $ymm0 = VPSLLQYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSLLQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSLLQYrr $ymm0, 14
|
|
$ymm0 = VPSLLQZ256rr $ymm0, 14
|
|
; CHECK: $ymm0 = VPSLLVDYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSLLVDZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSLLVDYrr $ymm0, 14
|
|
$ymm0 = VPSLLVDZ256rr $ymm0, 14
|
|
; CHECK: $ymm0 = VPSLLVQYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSLLVQZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSLLVQYrr $ymm0, 14
|
|
$ymm0 = VPSLLVQZ256rr $ymm0, 14
|
|
; CHECK: $ymm0 = VPSLLWYri $ymm0, 7
|
|
$ymm0 = VPSLLWZ256ri $ymm0, 7
|
|
; CHECK: $ymm0 = VPSLLWYrm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm0 = VPSLLWZ256rm $ymm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm0 = VPSLLWYrr $ymm0, 14
|
|
$ymm0 = VPSLLWZ256rr $ymm0, 14
|
|
; CHECK: $ymm0 = VCVTDQ2PDYrm $rdi, $ymm0, 1, $noreg, 0
|
|
$ymm0 = VCVTDQ2PDZ256rm $rdi, $ymm0, 1, $noreg, 0
|
|
; CHECK: $ymm0 = VCVTDQ2PDYrr $xmm0
|
|
$ymm0 = VCVTDQ2PDZ256rr $xmm0
|
|
; CHECK: $ymm0 = VCVTDQ2PSYrm $rdi, $ymm0, 1, $noreg, 0
|
|
$ymm0 = VCVTDQ2PSZ256rm $rdi, $ymm0, 1, $noreg, 0
|
|
; CHECK: $ymm0 = VCVTDQ2PSYrr $ymm0
|
|
$ymm0 = VCVTDQ2PSZ256rr $ymm0
|
|
; CHECK: $xmm0 = VCVTPD2DQYrm $rdi, $ymm0, 1, $noreg, 0
|
|
$xmm0 = VCVTPD2DQZ256rm $rdi, $ymm0, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTPD2DQYrr $ymm0
|
|
$xmm0 = VCVTPD2DQZ256rr $ymm0
|
|
; CHECK: $xmm0 = VCVTPD2PSYrm $rdi, $ymm0, 1, $noreg, 0
|
|
$xmm0 = VCVTPD2PSZ256rm $rdi, $ymm0, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTPD2PSYrr $ymm0
|
|
$xmm0 = VCVTPD2PSZ256rr $ymm0
|
|
; CHECK: $ymm0 = VCVTPS2DQYrm $rdi, $ymm0, 1, $noreg, 0
|
|
$ymm0 = VCVTPS2DQZ256rm $rdi, $ymm0, 1, $noreg, 0
|
|
; CHECK: $ymm0 = VCVTPS2DQYrr $ymm0
|
|
$ymm0 = VCVTPS2DQZ256rr $ymm0
|
|
; CHECK: $ymm0 = VCVTPS2PDYrm $rdi, $ymm0, 1, $noreg, 0
|
|
$ymm0 = VCVTPS2PDZ256rm $rdi, $ymm0, 1, $noreg, 0
|
|
; CHECK: $ymm0 = VCVTPS2PDYrr $xmm0
|
|
$ymm0 = VCVTPS2PDZ256rr $xmm0
|
|
; CHECK: VCVTPS2PHYmr $rdi, $ymm0, 1, $noreg, 0, $noreg, $noreg
|
|
VCVTPS2PHZ256mr $rdi, $ymm0, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm0 = VCVTPS2PHYrr $ymm0, $noreg
|
|
$xmm0 = VCVTPS2PHZ256rr $ymm0, $noreg
|
|
; CHECK: $ymm0 = VCVTPH2PSYrm $rdi, $ymm0, 1, $noreg, 0
|
|
$ymm0 = VCVTPH2PSZ256rm $rdi, $ymm0, 1, $noreg, 0
|
|
; CHECK: $ymm0 = VCVTPH2PSYrr $xmm0
|
|
$ymm0 = VCVTPH2PSZ256rr $xmm0
|
|
; CHECK: $xmm0 = VCVTTPD2DQYrm $rdi, $ymm0, 1, $noreg, 0
|
|
$xmm0 = VCVTTPD2DQZ256rm $rdi, $ymm0, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTTPD2DQYrr $ymm0
|
|
$xmm0 = VCVTTPD2DQZ256rr $ymm0
|
|
; CHECK: $ymm0 = VCVTTPS2DQYrm $rdi, $ymm0, 1, $noreg, 0
|
|
$ymm0 = VCVTTPS2DQZ256rm $rdi, $ymm0, 1, $noreg, 0
|
|
; CHECK: $ymm0 = VCVTTPS2DQYrr $ymm0
|
|
$ymm0 = VCVTTPS2DQZ256rr $ymm0
|
|
; CHECK: $ymm0 = VSQRTPDYm $rdi, $noreg, $noreg, $noreg, $noreg
|
|
$ymm0 = VSQRTPDZ256m $rdi, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $ymm0 = VSQRTPDYr $ymm0
|
|
$ymm0 = VSQRTPDZ256r $ymm0
|
|
; CHECK: $ymm0 = VSQRTPSYm $rdi, $noreg, $noreg, $noreg, $noreg
|
|
$ymm0 = VSQRTPSZ256m $rdi, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $ymm0 = VSQRTPSYr $ymm0
|
|
$ymm0 = VSQRTPSZ256r $ymm0
|
|
; CHECK: $ymm0 = VPALIGNRYrmi $ymm0, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$ymm0 = VPALIGNRZ256rmi $ymm0, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $ymm0 = VPALIGNRYrri $ymm0, $ymm1, $noreg
|
|
$ymm0 = VPALIGNRZ256rri $ymm0, $ymm1, $noreg
|
|
; CHECK: $ymm0 = VMOVUPSYrm $rdi, 1, $noreg, 0, $noreg
|
|
$ymm0 = VMOVUPSZ256rm $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm0 = VMOVUPSYrr $ymm0
|
|
$ymm0 = VMOVUPSZ256rr $ymm0
|
|
; CHECK: $ymm0 = VMOVUPSYrr_REV $ymm0
|
|
$ymm0 = VMOVUPSZ256rr_REV $ymm0
|
|
; CHECK: $ymm0 = VPSHUFBYrm $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$ymm0 = VPSHUFBZ256rm $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $ymm0 = VPSHUFBYrr $ymm0, $ymm1
|
|
$ymm0 = VPSHUFBZ256rr $ymm0, $ymm1
|
|
; CHECK: $ymm0 = VPSHUFDYmi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm0 = VPSHUFDZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm0 = VPSHUFDYri $ymm0, -24
|
|
$ymm0 = VPSHUFDZ256ri $ymm0, -24
|
|
; CHECK: $ymm0 = VPSHUFHWYmi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm0 = VPSHUFHWZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm0 = VPSHUFHWYri $ymm0, -24
|
|
$ymm0 = VPSHUFHWZ256ri $ymm0, -24
|
|
; CHECK: $ymm0 = VPSHUFLWYmi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm0 = VPSHUFLWZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm0 = VPSHUFLWYri $ymm0, -24
|
|
$ymm0 = VPSHUFLWZ256ri $ymm0, -24
|
|
; CHECK: $ymm0 = VSHUFPDYrmi $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$ymm0 = VSHUFPDZ256rmi $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $ymm0 = VSHUFPDYrri $ymm0, $noreg, $noreg
|
|
$ymm0 = VSHUFPDZ256rri $ymm0, $noreg, $noreg
|
|
; CHECK: $ymm0 = VSHUFPSYrmi $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$ymm0 = VSHUFPSZ256rmi $ymm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $ymm0 = VSHUFPSYrri $ymm0, $noreg, $noreg
|
|
$ymm0 = VSHUFPSZ256rri $ymm0, $noreg, $noreg
RET 0, $zmm0, $zmm1
...
---
# CHECK-LABEL: name: evex_z128_to_vex_test
# CHECK: bb.0:
name: evex_z128_to_vex_test
body: |
bb.0:
; CHECK: VMOVAPDmr $rdi, 1, $noreg, 0, $noreg, $xmm0
VMOVAPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVAPDrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMOVAPDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMOVAPDrr $xmm0
|
|
$xmm0 = VMOVAPDZ128rr $xmm0
|
|
; CHECK: VMOVAPSmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVAPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVAPSrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMOVAPSZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMOVAPSrr $xmm0
|
|
$xmm0 = VMOVAPSZ128rr $xmm0
|
|
; CHECK: VMOVDQAmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVDQA32Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVDQArm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMOVDQA32Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMOVDQArr $xmm0
|
|
$xmm0 = VMOVDQA32Z128rr $xmm0
|
|
; CHECK: VMOVDQAmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVDQA64Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVDQArm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMOVDQA64Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMOVDQArr $xmm0
|
|
$xmm0 = VMOVDQA64Z128rr $xmm0
|
|
; CHECK: VMOVDQUmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVDQU16Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVDQUrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMOVDQU16Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMOVDQUrr $xmm0
|
|
$xmm0 = VMOVDQU16Z128rr $xmm0
|
|
; CHECK: VMOVDQUmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVDQU32Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVDQUrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMOVDQU32Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMOVDQUrr $xmm0
|
|
$xmm0 = VMOVDQU32Z128rr $xmm0
|
|
; CHECK: VMOVDQUmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVDQU64Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVDQUrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMOVDQU64Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMOVDQUrr $xmm0
|
|
$xmm0 = VMOVDQU64Z128rr $xmm0
|
|
; CHECK: VMOVDQUmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVDQU8Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVDQUrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMOVDQU8Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMOVDQUrr $xmm0
|
|
$xmm0 = VMOVDQU8Z128rr $xmm0
|
|
; CHECK: $xmm0 = VMOVDQUrr_REV $xmm0
|
|
$xmm0 = VMOVDQU8Z128rr_REV $xmm0
|
|
; CHECK: $xmm0 = VMOVNTDQArm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMOVNTDQAZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: VMOVUPDmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVUPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVUPDrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMOVUPDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMOVUPDrr $xmm0
|
|
$xmm0 = VMOVUPDZ128rr $xmm0
|
|
; CHECK: $xmm0 = VMOVUPDrr_REV $xmm0
|
|
$xmm0 = VMOVUPDZ128rr_REV $xmm0
|
|
; CHECK: VMOVUPSmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVUPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVUPSrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMOVUPSZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMOVUPSrr $xmm0
|
|
$xmm0 = VMOVUPSZ128rr $xmm0
|
|
; CHECK: $xmm0 = VMOVUPSrr_REV $xmm0
|
|
$xmm0 = VMOVUPSZ128rr_REV $xmm0
|
|
; CHECK: VMOVNTDQmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVNTDQZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: VMOVNTPDmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVNTPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: VMOVNTPSmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVNTPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVAPDrr_REV $xmm0
|
|
$xmm0 = VMOVAPDZ128rr_REV $xmm0
|
|
; CHECK: $xmm0 = VMOVAPSrr_REV $xmm0
|
|
$xmm0 = VMOVAPSZ128rr_REV $xmm0
|
|
; CHECK: $xmm0 = VMOVDQArr_REV $xmm0
|
|
$xmm0 = VMOVDQA32Z128rr_REV $xmm0
|
|
; CHECK: $xmm0 = VMOVDQArr_REV $xmm0
|
|
$xmm0 = VMOVDQA64Z128rr_REV $xmm0
|
|
; CHECK: $xmm0 = VMOVDQUrr_REV $xmm0
|
|
$xmm0 = VMOVDQU16Z128rr_REV $xmm0
|
|
; CHECK: $xmm0 = VMOVDQUrr_REV $xmm0
|
|
$xmm0 = VMOVDQU32Z128rr_REV $xmm0
|
|
; CHECK: $xmm0 = VMOVDQUrr_REV $xmm0
|
|
$xmm0 = VMOVDQU64Z128rr_REV $xmm0
|
|
; CHECK: $xmm0 = VPMOVSXBDrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMOVSXBDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMOVSXBDrr $xmm0
|
|
$xmm0 = VPMOVSXBDZ128rr $xmm0
|
|
; CHECK: $xmm0 = VPMOVSXBQrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMOVSXBQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMOVSXBQrr $xmm0
|
|
$xmm0 = VPMOVSXBQZ128rr $xmm0
|
|
; CHECK: $xmm0 = VPMOVSXBWrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMOVSXBWZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMOVSXBWrr $xmm0
|
|
$xmm0 = VPMOVSXBWZ128rr $xmm0
|
|
; CHECK: $xmm0 = VPMOVSXDQrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMOVSXDQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMOVSXDQrr $xmm0
|
|
$xmm0 = VPMOVSXDQZ128rr $xmm0
|
|
; CHECK: $xmm0 = VPMOVSXWDrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMOVSXWDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMOVSXWDrr $xmm0
|
|
$xmm0 = VPMOVSXWDZ128rr $xmm0
|
|
; CHECK: $xmm0 = VPMOVSXWQrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMOVSXWQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMOVSXWQrr $xmm0
|
|
$xmm0 = VPMOVSXWQZ128rr $xmm0
|
|
; CHECK: $xmm0 = VPMOVZXBDrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMOVZXBDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMOVZXBDrr $xmm0
|
|
$xmm0 = VPMOVZXBDZ128rr $xmm0
|
|
; CHECK: $xmm0 = VPMOVZXBQrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMOVZXBQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMOVZXBQrr $xmm0
|
|
$xmm0 = VPMOVZXBQZ128rr $xmm0
|
|
; CHECK: $xmm0 = VPMOVZXBWrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMOVZXBWZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMOVZXBWrr $xmm0
|
|
$xmm0 = VPMOVZXBWZ128rr $xmm0
|
|
; CHECK: $xmm0 = VPMOVZXDQrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMOVZXDQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMOVZXDQrr $xmm0
|
|
$xmm0 = VPMOVZXDQZ128rr $xmm0
|
|
; CHECK: $xmm0 = VPMOVZXWDrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMOVZXWDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMOVZXWDrr $xmm0
|
|
$xmm0 = VPMOVZXWDZ128rr $xmm0
|
|
; CHECK: $xmm0 = VPMOVZXWQrm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMOVZXWQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMOVZXWQrr $xmm0
|
|
$xmm0 = VPMOVZXWQZ128rr $xmm0
|
|
; CHECK: VMOVHPDmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVHPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVHPDrm $xmm0, $rdi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VMOVHPDZ128rm $xmm0, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: VMOVHPSmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVHPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVHPSrm $xmm0, $rdi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VMOVHPSZ128rm $xmm0, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: VMOVLPDmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVLPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVLPDrm $xmm0, $rdi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VMOVLPDZ128rm $xmm0, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: VMOVLPSmr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
VMOVLPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm0
|
|
; CHECK: $xmm0 = VMOVLPSrm $xmm0, $rdi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VMOVLPSZ128rm $xmm0, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VMAXCPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMAXCPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMAXCPDrr $xmm0, $xmm1
|
|
$xmm0 = VMAXCPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMAXCPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMAXCPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMAXCPSrr $xmm0, $xmm1
|
|
$xmm0 = VMAXCPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMAXCPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMAXPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMAXCPDrr $xmm0, $xmm1
|
|
$xmm0 = VMAXPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMAXCPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMAXPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMAXCPSrr $xmm0, $xmm1
|
|
$xmm0 = VMAXPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMINCPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMINCPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMINCPDrr $xmm0, $xmm1
|
|
$xmm0 = VMINCPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMINCPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMINCPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMINCPSrr $xmm0, $xmm1
|
|
$xmm0 = VMINCPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMINCPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMINPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMINCPDrr $xmm0, $xmm1
|
|
$xmm0 = VMINPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMINCPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMINPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMINCPSrr $xmm0, $xmm1
|
|
$xmm0 = VMINPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMULPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMULPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMULPDrr $xmm0, $xmm1
|
|
$xmm0 = VMULPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMULPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMULPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMULPSrr $xmm0, $xmm1
|
|
$xmm0 = VMULPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VORPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VORPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VORPDrr $xmm0, $xmm1
|
|
$xmm0 = VORPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VORPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VORPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VORPSrr $xmm0, $xmm1
|
|
$xmm0 = VORPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPADDBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPADDBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPADDBrr $xmm0, $xmm1
|
|
$xmm0 = VPADDBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPADDDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPADDDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPADDDrr $xmm0, $xmm1
|
|
$xmm0 = VPADDDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPADDQrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPADDQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPADDQrr $xmm0, $xmm1
|
|
$xmm0 = VPADDQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPADDSBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPADDSBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPADDSBrr $xmm0, $xmm1
|
|
$xmm0 = VPADDSBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPADDSWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPADDSWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPADDSWrr $xmm0, $xmm1
|
|
$xmm0 = VPADDSWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPADDUSBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPADDUSBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPADDUSBrr $xmm0, $xmm1
|
|
$xmm0 = VPADDUSBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPADDUSWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPADDUSWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPADDUSWrr $xmm0, $xmm1
|
|
$xmm0 = VPADDUSWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPADDWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPADDWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPADDWrr $xmm0, $xmm1
|
|
$xmm0 = VPADDWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPANDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPANDDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPANDrr $xmm0, $xmm1
|
|
$xmm0 = VPANDDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPANDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPANDQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPANDrr $xmm0, $xmm1
|
|
$xmm0 = VPANDQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPANDNrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPANDNDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPANDNrr $xmm0, $xmm1
|
|
$xmm0 = VPANDNDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPANDNrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPANDNQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPANDNrr $xmm0, $xmm1
|
|
$xmm0 = VPANDNQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPAVGBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPAVGBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPAVGBrr $xmm0, $xmm1
|
|
$xmm0 = VPAVGBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPAVGWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPAVGWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPAVGWrr $xmm0, $xmm1
|
|
$xmm0 = VPAVGWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMAXSBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMAXSBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMAXSBrr $xmm0, $xmm1
|
|
$xmm0 = VPMAXSBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMAXSDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMAXSDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMAXSDrr $xmm0, $xmm1
|
|
$xmm0 = VPMAXSDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMAXSWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMAXSWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMAXSWrr $xmm0, $xmm1
|
|
$xmm0 = VPMAXSWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMAXUBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMAXUBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMAXUBrr $xmm0, $xmm1
|
|
$xmm0 = VPMAXUBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMAXUDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMAXUDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMAXUDrr $xmm0, $xmm1
|
|
$xmm0 = VPMAXUDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMAXUWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMAXUWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMAXUWrr $xmm0, $xmm1
|
|
$xmm0 = VPMAXUWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMINSBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMINSBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMINSBrr $xmm0, $xmm1
|
|
$xmm0 = VPMINSBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMINSDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMINSDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMINSDrr $xmm0, $xmm1
|
|
$xmm0 = VPMINSDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMINSWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMINSWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMINSWrr $xmm0, $xmm1
|
|
$xmm0 = VPMINSWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMINUBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMINUBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMINUBrr $xmm0, $xmm1
|
|
$xmm0 = VPMINUBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMINUDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMINUDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMINUDrr $xmm0, $xmm1
|
|
$xmm0 = VPMINUDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMINUWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMINUWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMINUWrr $xmm0, $xmm1
|
|
$xmm0 = VPMINUWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMULDQrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMULDQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMULDQrr $xmm0, $xmm1
|
|
$xmm0 = VPMULDQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMULHRSWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMULHRSWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMULHRSWrr $xmm0, $xmm1
|
|
$xmm0 = VPMULHRSWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMULHUWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMULHUWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMULHUWrr $xmm0, $xmm1
|
|
$xmm0 = VPMULHUWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMULHWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMULHWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMULHWrr $xmm0, $xmm1
|
|
$xmm0 = VPMULHWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMULLDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMULLDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMULLDrr $xmm0, $xmm1
|
|
$xmm0 = VPMULLDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMULLWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMULLWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMULLWrr $xmm0, $xmm1
|
|
$xmm0 = VPMULLWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMULUDQrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMULUDQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMULUDQrr $xmm0, $xmm1
|
|
$xmm0 = VPMULUDQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPORrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPORDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPORrr $xmm0, $xmm1
|
|
$xmm0 = VPORDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPORrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPORQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPORrr $xmm0, $xmm1
|
|
$xmm0 = VPORQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPSUBBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSUBBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSUBBrr $xmm0, $xmm1
|
|
$xmm0 = VPSUBBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPSUBDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSUBDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSUBDrr $xmm0, $xmm1
|
|
$xmm0 = VPSUBDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPSUBQrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSUBQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSUBQrr $xmm0, $xmm1
|
|
$xmm0 = VPSUBQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPSUBSBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSUBSBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSUBSBrr $xmm0, $xmm1
|
|
$xmm0 = VPSUBSBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPSUBSWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSUBSWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSUBSWrr $xmm0, $xmm1
|
|
$xmm0 = VPSUBSWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPSUBUSBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSUBUSBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSUBUSBrr $xmm0, $xmm1
|
|
$xmm0 = VPSUBUSBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPSUBUSWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSUBUSWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSUBUSWrr $xmm0, $xmm1
|
|
$xmm0 = VPSUBUSWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPSUBWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSUBWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSUBWrr $xmm0, $xmm1
|
|
$xmm0 = VPSUBWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VADDPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VADDPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VADDPDrr $xmm0, $xmm1
|
|
$xmm0 = VADDPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VADDPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VADDPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VADDPSrr $xmm0, $xmm1
|
|
$xmm0 = VADDPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VANDNPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VANDNPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VANDNPDrr $xmm0, $xmm1
|
|
$xmm0 = VANDNPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VANDNPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VANDNPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VANDNPSrr $xmm0, $xmm1
|
|
$xmm0 = VANDNPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VANDPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VANDPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VANDPDrr $xmm0, $xmm1
|
|
$xmm0 = VANDPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VANDPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VANDPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VANDPSrr $xmm0, $xmm1
|
|
$xmm0 = VANDPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VDIVPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VDIVPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VDIVPDrr $xmm0, $xmm1
|
|
$xmm0 = VDIVPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VDIVPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VDIVPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VDIVPSrr $xmm0, $xmm1
|
|
$xmm0 = VDIVPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPXORrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPXORDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPXORrr $xmm0, $xmm1
|
|
$xmm0 = VPXORDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPXORrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPXORQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPXORrr $xmm0, $xmm1
|
|
$xmm0 = VPXORQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VSUBPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VSUBPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VSUBPDrr $xmm0, $xmm1
|
|
$xmm0 = VSUBPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VSUBPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VSUBPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VSUBPSrr $xmm0, $xmm1
|
|
$xmm0 = VSUBPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VXORPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VXORPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VXORPDrr $xmm0, $xmm1
|
|
$xmm0 = VXORPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VXORPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VXORPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VXORPSrr $xmm0, $xmm1
|
|
$xmm0 = VXORPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMADDUBSWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMADDUBSWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMADDUBSWrr $xmm0, $xmm1
|
|
$xmm0 = VPMADDUBSWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPMADDWDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPMADDWDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPMADDWDrr $xmm0, $xmm1
|
|
$xmm0 = VPMADDWDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPACKSSDWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPACKSSDWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPACKSSDWrr $xmm0, $xmm1
|
|
$xmm0 = VPACKSSDWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPACKSSWBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPACKSSWBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPACKSSWBrr $xmm0, $xmm1
|
|
$xmm0 = VPACKSSWBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPACKUSDWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPACKUSDWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPACKUSDWrr $xmm0, $xmm1
|
|
$xmm0 = VPACKUSDWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPACKUSWBrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPACKUSWBZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPACKUSWBrr $xmm0, $xmm1
|
|
$xmm0 = VPACKUSWBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPUNPCKHBWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPUNPCKHBWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPUNPCKHBWrr $xmm0, $xmm1
|
|
$xmm0 = VPUNPCKHBWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPUNPCKHDQrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPUNPCKHDQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPUNPCKHDQrr $xmm0, $xmm1
|
|
$xmm0 = VPUNPCKHDQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPUNPCKHQDQrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPUNPCKHQDQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPUNPCKHQDQrr $xmm0, $xmm1
|
|
$xmm0 = VPUNPCKHQDQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPUNPCKHWDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPUNPCKHWDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPUNPCKHWDrr $xmm0, $xmm1
|
|
$xmm0 = VPUNPCKHWDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPUNPCKLBWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPUNPCKLBWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPUNPCKLBWrr $xmm0, $xmm1
|
|
$xmm0 = VPUNPCKLBWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPUNPCKLDQrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPUNPCKLDQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPUNPCKLDQrr $xmm0, $xmm1
|
|
$xmm0 = VPUNPCKLDQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPUNPCKLQDQrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPUNPCKLQDQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPUNPCKLQDQrr $xmm0, $xmm1
|
|
$xmm0 = VPUNPCKLQDQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPUNPCKLWDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPUNPCKLWDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPUNPCKLWDrr $xmm0, $xmm1
|
|
$xmm0 = VPUNPCKLWDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VUNPCKHPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VUNPCKHPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VUNPCKHPDrr $xmm0, $xmm1
|
|
$xmm0 = VUNPCKHPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VUNPCKHPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VUNPCKHPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VUNPCKHPSrr $xmm0, $xmm1
|
|
$xmm0 = VUNPCKHPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VUNPCKLPDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VUNPCKLPDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VUNPCKLPDrr $xmm0, $xmm1
|
|
$xmm0 = VUNPCKLPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VUNPCKLPSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VUNPCKLPSZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VUNPCKLPSrr $xmm0, $xmm1
|
|
$xmm0 = VUNPCKLPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VFMADD132PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD132PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD132PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD132PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD132PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD132PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD132PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD132PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD213PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD213PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD213PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD213PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD213PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD213PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD213PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD213PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD231PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD231PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD231PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD231PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD231PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD231PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD231PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD231PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADDSUB132PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADDSUB132PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADDSUB132PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADDSUB132PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADDSUB132PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADDSUB132PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADDSUB132PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADDSUB132PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADDSUB213PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADDSUB213PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADDSUB213PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADDSUB213PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADDSUB213PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADDSUB213PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADDSUB213PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADDSUB213PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADDSUB231PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADDSUB231PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADDSUB231PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADDSUB231PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADDSUB231PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADDSUB231PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADDSUB231PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADDSUB231PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB132PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB132PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB132PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB132PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB132PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB132PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB132PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB132PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB213PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB213PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB213PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB213PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB213PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB213PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB213PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB213PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB231PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB231PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB231PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB231PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB231PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB231PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB231PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB231PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUBADD132PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUBADD132PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUBADD132PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUBADD132PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUBADD132PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUBADD132PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUBADD132PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUBADD132PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUBADD213PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUBADD213PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUBADD213PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUBADD213PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUBADD213PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUBADD213PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUBADD213PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUBADD213PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUBADD231PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUBADD231PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUBADD231PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUBADD231PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUBADD231PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUBADD231PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUBADD231PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUBADD231PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD132PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD132PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD132PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD132PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD132PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD132PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD132PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD132PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD213PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD213PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD213PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD213PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD213PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD213PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD213PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD213PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD231PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD231PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD231PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD231PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD231PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD231PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD231PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD231PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB132PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB132PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB132PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB132PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB132PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB132PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB132PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB132PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB213PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB213PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB213PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB213PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB213PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB213PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB213PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB213PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB231PDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB231PDZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB231PDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB231PDZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB231PSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB231PSZ128m $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB231PSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB231PSZ128r $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VPSLLDri $xmm0, 7
|
|
$xmm0 = VPSLLDZ128ri $xmm0, 7
|
|
; CHECK: $xmm0 = VPSLLDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSLLDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSLLDrr $xmm0, 14
|
|
$xmm0 = VPSLLDZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSLLQri $xmm0, 7
|
|
$xmm0 = VPSLLQZ128ri $xmm0, 7
|
|
; CHECK: $xmm0 = VPSLLQrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSLLQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSLLQrr $xmm0, 14
|
|
$xmm0 = VPSLLQZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSLLVDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSLLVDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSLLVDrr $xmm0, 14
|
|
$xmm0 = VPSLLVDZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSLLVQrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSLLVQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSLLVQrr $xmm0, 14
|
|
$xmm0 = VPSLLVQZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSLLWri $xmm0, 7
|
|
$xmm0 = VPSLLWZ128ri $xmm0, 7
|
|
; CHECK: $xmm0 = VPSLLWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSLLWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSLLWrr $xmm0, 14
|
|
$xmm0 = VPSLLWZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSRADri $xmm0, 7
|
|
$xmm0 = VPSRADZ128ri $xmm0, 7
|
|
; CHECK: $xmm0 = VPSRADrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSRADZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSRADrr $xmm0, 14
|
|
$xmm0 = VPSRADZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSRAVDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSRAVDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSRAVDrr $xmm0, 14
|
|
$xmm0 = VPSRAVDZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSRAWri $xmm0, 7
|
|
$xmm0 = VPSRAWZ128ri $xmm0, 7
|
|
; CHECK: $xmm0 = VPSRAWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSRAWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSRAWrr $xmm0, 14
|
|
$xmm0 = VPSRAWZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSRLDQri $xmm0, 14
|
|
$xmm0 = VPSRLDQZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSRLDri $xmm0, 7
|
|
$xmm0 = VPSRLDZ128ri $xmm0, 7
|
|
; CHECK: $xmm0 = VPSRLDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSRLDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSRLDrr $xmm0, 14
|
|
$xmm0 = VPSRLDZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSRLQri $xmm0, 7
|
|
$xmm0 = VPSRLQZ128ri $xmm0, 7
|
|
; CHECK: $xmm0 = VPSRLQrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSRLQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSRLQrr $xmm0, 14
|
|
$xmm0 = VPSRLQZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSRLVDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSRLVDZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSRLVDrr $xmm0, 14
|
|
$xmm0 = VPSRLVDZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSRLVQrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSRLVQZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSRLVQrr $xmm0, 14
|
|
$xmm0 = VPSRLVQZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPSRLWri $xmm0, 7
|
|
$xmm0 = VPSRLWZ128ri $xmm0, 7
|
|
; CHECK: $xmm0 = VPSRLWrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VPSRLWZ128rm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VPSRLWrr $xmm0, 14
|
|
$xmm0 = VPSRLWZ128rr $xmm0, 14
|
|
; CHECK: $xmm0 = VPERMILPDmi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$xmm0 = VPERMILPDZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPERMILPDri $xmm0, 9
|
|
$xmm0 = VPERMILPDZ128ri $xmm0, 9
|
|
; CHECK: $xmm0 = VPERMILPDrm $xmm0, $rdi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VPERMILPDZ128rm $xmm0, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VPERMILPDrr $xmm0, $xmm1
|
|
$xmm0 = VPERMILPDZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPERMILPSmi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$xmm0 = VPERMILPSZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPERMILPSri $xmm0, 9
|
|
$xmm0 = VPERMILPSZ128ri $xmm0, 9
|
|
; CHECK: $xmm0 = VPERMILPSrm $xmm0, $rdi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VPERMILPSZ128rm $xmm0, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VPERMILPSrr $xmm0, $xmm1
|
|
$xmm0 = VPERMILPSZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VCVTPH2PSrm $rdi, $xmm0, 1, $noreg, 0
|
|
$xmm0 = VCVTPH2PSZ128rm $rdi, $xmm0, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTPH2PSrr $xmm0
|
|
$xmm0 = VCVTPH2PSZ128rr $xmm0
|
|
; CHECK: $xmm0 = VCVTDQ2PDrm $rdi, $xmm0, 1, $noreg, 0
|
|
$xmm0 = VCVTDQ2PDZ128rm $rdi, $xmm0, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTDQ2PDrr $xmm0
|
|
$xmm0 = VCVTDQ2PDZ128rr $xmm0
|
|
; CHECK: $xmm0 = VCVTDQ2PSrm $rdi, $xmm0, 1, $noreg, 0
|
|
$xmm0 = VCVTDQ2PSZ128rm $rdi, $xmm0, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTDQ2PSrr $xmm0
|
|
$xmm0 = VCVTDQ2PSZ128rr $xmm0
|
|
; CHECK: $xmm0 = VCVTPD2DQrm $rdi, $xmm0, 1, $noreg, 0
|
|
$xmm0 = VCVTPD2DQZ128rm $rdi, $xmm0, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTPD2DQrr $xmm0
|
|
$xmm0 = VCVTPD2DQZ128rr $xmm0
|
|
; CHECK: $xmm0 = VCVTPD2PSrm $rdi, $xmm0, 1, $noreg, 0
|
|
$xmm0 = VCVTPD2PSZ128rm $rdi, $xmm0, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTPD2PSrr $xmm0
|
|
$xmm0 = VCVTPD2PSZ128rr $xmm0
|
|
; CHECK: $xmm0 = VCVTPS2DQrm $rdi, $xmm0, 1, $noreg, 0
|
|
$xmm0 = VCVTPS2DQZ128rm $rdi, $xmm0, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTPS2DQrr $xmm0
|
|
$xmm0 = VCVTPS2DQZ128rr $xmm0
|
|
; CHECK: $xmm0 = VCVTPS2PDrm $rdi, $xmm0, 1, $noreg, 0
|
|
$xmm0 = VCVTPS2PDZ128rm $rdi, $xmm0, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTPS2PDrr $xmm0
|
|
$xmm0 = VCVTPS2PDZ128rr $xmm0
|
|
; CHECK: $xmm0 = VCVTTPD2DQrm $rdi, $xmm0, 1, $noreg, 0
|
|
$xmm0 = VCVTTPD2DQZ128rm $rdi, $xmm0, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTTPD2DQrr $xmm0
|
|
$xmm0 = VCVTTPD2DQZ128rr $xmm0
|
|
; CHECK: $xmm0 = VCVTTPS2DQrm $rdi, $xmm0, 1, $noreg, 0
|
|
$xmm0 = VCVTTPS2DQZ128rm $rdi, $xmm0, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTTPS2DQrr $xmm0
|
|
$xmm0 = VCVTTPS2DQZ128rr $xmm0
|
|
; CHECK: $xmm0 = VSQRTPDm $rdi, $noreg, $noreg, $noreg, $noreg
|
|
$xmm0 = VSQRTPDZ128m $rdi, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm0 = VSQRTPDr $xmm0
|
|
$xmm0 = VSQRTPDZ128r $xmm0
|
|
; CHECK: $xmm0 = VSQRTPSm $rdi, $noreg, $noreg, $noreg, $noreg
|
|
$xmm0 = VSQRTPSZ128m $rdi, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm0 = VSQRTPSr $xmm0
|
|
$xmm0 = VSQRTPSZ128r $xmm0
|
|
; CHECK: $xmm0 = VMOVDDUPrm $rdi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VMOVDDUPZ128rm $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VMOVDDUPrr $xmm0
|
|
$xmm0 = VMOVDDUPZ128rr $xmm0
|
|
; CHECK: $xmm0 = VMOVSHDUPrm $rdi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VMOVSHDUPZ128rm $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VMOVSHDUPrr $xmm0
|
|
$xmm0 = VMOVSHDUPZ128rr $xmm0
|
|
; CHECK: $xmm0 = VMOVSLDUPrm $rdi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VMOVSLDUPZ128rm $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VMOVSLDUPrr $xmm0
|
|
$xmm0 = VMOVSLDUPZ128rr $xmm0
|
|
; CHECK: $xmm0 = VPSHUFBrm $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$xmm0 = VPSHUFBZ128rm $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPSHUFBrr $xmm0, $xmm1
|
|
$xmm0 = VPSHUFBZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VPSHUFDmi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$xmm0 = VPSHUFDZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPSHUFDri $xmm0, -24
|
|
$xmm0 = VPSHUFDZ128ri $xmm0, -24
|
|
; CHECK: $xmm0 = VPSHUFHWmi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$xmm0 = VPSHUFHWZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPSHUFHWri $xmm0, -24
|
|
$xmm0 = VPSHUFHWZ128ri $xmm0, -24
|
|
; CHECK: $xmm0 = VPSHUFLWmi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$xmm0 = VPSHUFLWZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPSHUFLWri $xmm0, -24
|
|
$xmm0 = VPSHUFLWZ128ri $xmm0, -24
|
|
; CHECK: $xmm0 = VPSLLDQri $xmm0, $xmm1
|
|
$xmm0 = VPSLLDQZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VSHUFPDrmi $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$xmm0 = VSHUFPDZ128rmi $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm0 = VSHUFPDrri $xmm0, $noreg, $noreg
|
|
$xmm0 = VSHUFPDZ128rri $xmm0, $noreg, $noreg
|
|
; CHECK: $xmm0 = VSHUFPSrmi $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$xmm0 = VSHUFPSZ128rmi $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm0 = VSHUFPSrri $xmm0, $noreg, $noreg
|
|
$xmm0 = VSHUFPSZ128rri $xmm0, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPSADBWrm $xmm0, 1, $noreg, $rax, $noreg, $noreg
|
|
$xmm0 = VPSADBWZ128rm $xmm0, 1, $noreg, $rax, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPSADBWrr $xmm0, $xmm1
|
|
$xmm0 = VPSADBWZ128rr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VBROADCASTSSrm $rip, $noreg, $noreg, $noreg, $noreg
|
|
$xmm0 = VBROADCASTSSZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm0 = VBROADCASTSSrr $xmm0
|
|
$xmm0 = VBROADCASTSSZ128r $xmm0
|
|
; CHECK: $xmm0 = VPBROADCASTBrm $rip, $noreg, $noreg, $noreg, $noreg
|
|
$xmm0 = VPBROADCASTBZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPBROADCASTBrr $xmm0
|
|
$xmm0 = VPBROADCASTBZ128r $xmm0
|
|
; CHECK: $xmm0 = VPBROADCASTDrm $rip, $noreg, $noreg, $noreg, $noreg
|
|
$xmm0 = VPBROADCASTDZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPBROADCASTDrr $xmm0
|
|
$xmm0 = VPBROADCASTDZ128r $xmm0
|
|
; CHECK: $xmm0 = VPBROADCASTQrm $rip, $noreg, $noreg, $noreg, $noreg
|
|
$xmm0 = VPBROADCASTQZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPBROADCASTQrr $xmm0
|
|
$xmm0 = VPBROADCASTQZ128r $xmm0
|
|
; CHECK: $xmm0 = VPBROADCASTWrm $rip, $noreg, $noreg, $noreg, $noreg
|
|
$xmm0 = VPBROADCASTWZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPBROADCASTWrr $xmm0
|
|
$xmm0 = VPBROADCASTWZ128r $xmm0
|
|
; CHECK: $xmm0 = VPBROADCASTQrm $rip, $noreg, $noreg, $noreg, $noreg
|
|
$xmm0 = VBROADCASTI32X2Z128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm0 = VPBROADCASTQrr $xmm0
|
|
$xmm0 = VBROADCASTI32X2Z128r $xmm0
; CHECK: $xmm0 = VCVTPS2PHrr $xmm0, 2
$xmm0 = VCVTPS2PHZ128rr $xmm0, 2
; CHECK: VCVTPS2PHmr $rdi, $xmm0, 1, $noreg, 0, $noreg, $noreg
VCVTPS2PHZ128mr $rdi, $xmm0, 1, $noreg, 0, $noreg, $noreg
; CHECK: $xmm0 = VPABSBrm $rip, 1, $noreg, $rax, $noreg
$xmm0 = VPABSBZ128rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $xmm0 = VPABSBrr $xmm0
$xmm0 = VPABSBZ128rr $xmm0
; CHECK: $xmm0 = VPABSDrm $rip, 1, $noreg, $rax, $noreg
$xmm0 = VPABSDZ128rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $xmm0 = VPABSDrr $xmm0
$xmm0 = VPABSDZ128rr $xmm0
; CHECK: $xmm0 = VPABSWrm $rip, 1, $noreg, $rax, $noreg
$xmm0 = VPABSWZ128rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $xmm0 = VPABSWrr $xmm0
$xmm0 = VPABSWZ128rr $xmm0
; CHECK: $xmm0 = VPALIGNRrmi $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
$xmm0 = VPALIGNRZ128rmi $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VPALIGNRrri $xmm0, $xmm1, 15
$xmm0 = VPALIGNRZ128rri $xmm0, $xmm1, 15
RET 0, $zmm0, $zmm1
...
---
# CHECK-LABEL: name: evex_scalar_to_vex_test
# CHECK: bb.0:
name: evex_scalar_to_vex_test
body: |
bb.0:
; CHECK: $xmm0 = VADDSDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
$xmm0 = VADDSDZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
; CHECK: $xmm0 = VADDSDrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
$xmm0 = VADDSDZrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
; CHECK: $xmm0 = VADDSDrr $xmm0, $xmm1
$xmm0 = VADDSDZrr $xmm0, $xmm1
; CHECK: $xmm0 = VADDSDrr_Int $xmm0, $xmm1
$xmm0 = VADDSDZrr_Int $xmm0, $xmm1
; CHECK: $xmm0 = VADDSSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
$xmm0 = VADDSSZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
; CHECK: $xmm0 = VADDSSrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
$xmm0 = VADDSSZrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
; CHECK: $xmm0 = VADDSSrr $xmm0, $xmm1
$xmm0 = VADDSSZrr $xmm0, $xmm1
; CHECK: $xmm0 = VADDSSrr_Int $xmm0, $xmm1
$xmm0 = VADDSSZrr_Int $xmm0, $xmm1
; CHECK: $xmm0 = VDIVSDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
$xmm0 = VDIVSDZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
; CHECK: $xmm0 = VDIVSDrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
$xmm0 = VDIVSDZrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
; CHECK: $xmm0 = VDIVSDrr $xmm0, $xmm1
$xmm0 = VDIVSDZrr $xmm0, $xmm1
; CHECK: $xmm0 = VDIVSDrr_Int $xmm0, $xmm1
$xmm0 = VDIVSDZrr_Int $xmm0, $xmm1
; CHECK: $xmm0 = VDIVSSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
$xmm0 = VDIVSSZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
; CHECK: $xmm0 = VDIVSSrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
$xmm0 = VDIVSSZrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
; CHECK: $xmm0 = VDIVSSrr $xmm0, $xmm1
$xmm0 = VDIVSSZrr $xmm0, $xmm1
; CHECK: $xmm0 = VDIVSSrr_Int $xmm0, $xmm1
$xmm0 = VDIVSSZrr_Int $xmm0, $xmm1
; CHECK: $xmm0 = VMAXCSDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMAXCSDZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMAXCSDrr $xmm0, $xmm1
|
|
$xmm0 = VMAXCSDZrr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMAXCSSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMAXCSSZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMAXCSSrr $xmm0, $xmm1
|
|
$xmm0 = VMAXCSSZrr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMAXCSDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMAXSDZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMAXSDrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMAXSDZrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMAXCSDrr $xmm0, $xmm1
|
|
$xmm0 = VMAXSDZrr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMAXSDrr_Int $xmm0, $xmm1
|
|
$xmm0 = VMAXSDZrr_Int $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMAXCSSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMAXSSZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMAXSSrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMAXSSZrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMAXCSSrr $xmm0, $xmm1
|
|
$xmm0 = VMAXSSZrr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMAXSSrr_Int $xmm0, $xmm1
|
|
$xmm0 = VMAXSSZrr_Int $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMINCSDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMINCSDZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMINCSDrr $xmm0, $xmm1
|
|
$xmm0 = VMINCSDZrr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMINCSSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMINCSSZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMINCSSrr $xmm0, $xmm1
|
|
$xmm0 = VMINCSSZrr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMINCSDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMINSDZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMINSDrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMINSDZrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMINCSDrr $xmm0, $xmm1
|
|
$xmm0 = VMINSDZrr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMINSDrr_Int $xmm0, $xmm1
|
|
$xmm0 = VMINSDZrr_Int $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMINCSSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMINSSZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMINSSrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMINSSZrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMINCSSrr $xmm0, $xmm1
|
|
$xmm0 = VMINSSZrr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMINSSrr_Int $xmm0, $xmm1
|
|
$xmm0 = VMINSSZrr_Int $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMULSDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMULSDZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMULSDrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMULSDZrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMULSDrr $xmm0, $xmm1
|
|
$xmm0 = VMULSDZrr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMULSDrr_Int $xmm0, $xmm1
|
|
$xmm0 = VMULSDZrr_Int $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMULSSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMULSSZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMULSSrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VMULSSZrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VMULSSrr $xmm0, $xmm1
|
|
$xmm0 = VMULSSZrr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VMULSSrr_Int $xmm0, $xmm1
|
|
$xmm0 = VMULSSZrr_Int $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VSUBSDrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VSUBSDZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VSUBSDrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VSUBSDZrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VSUBSDrr $xmm0, $xmm1
|
|
$xmm0 = VSUBSDZrr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VSUBSDrr_Int $xmm0, $xmm1
|
|
$xmm0 = VSUBSDZrr_Int $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VSUBSSrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VSUBSSZrm $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VSUBSSrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm0 = VSUBSSZrm_Int $xmm0, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm0 = VSUBSSrr $xmm0, $xmm1
|
|
$xmm0 = VSUBSSZrr $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VSUBSSrr_Int $xmm0, $xmm1
|
|
$xmm0 = VSUBSSZrr_Int $xmm0, $xmm1
|
|
; CHECK: $xmm0 = VFMADD132SDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD132SDZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD132SDm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD132SDZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD132SDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD132SDZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD132SDr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD132SDZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD132SSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD132SSZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD132SSm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD132SSZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD132SSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD132SSZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD132SSr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD132SSZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD213SDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD213SDZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD213SDm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD213SDZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD213SDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD213SDZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD213SDr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD213SDZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD213SSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD213SSZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD213SSm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD213SSZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD213SSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD213SSZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD213SSr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD213SSZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD231SDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD231SDZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD231SDm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD231SDZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD231SDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD231SDZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD231SDr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD231SDZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD231SSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD231SSZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD231SSm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMADD231SSZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMADD231SSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD231SSZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMADD231SSr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMADD231SSZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB132SDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB132SDZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB132SDm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB132SDZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB132SDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB132SDZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB132SDr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB132SDZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB132SSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB132SSZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB132SSm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB132SSZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB132SSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB132SSZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB132SSr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB132SSZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB213SDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB213SDZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB213SDm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB213SDZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB213SDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB213SDZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB213SDr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB213SDZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB213SSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB213SSZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB213SSm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB213SSZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB213SSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB213SSZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB213SSr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB213SSZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB231SDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB231SDZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB231SDm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB231SDZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB231SDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB231SDZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB231SDr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB231SDZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB231SSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB231SSZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB231SSm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFMSUB231SSZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFMSUB231SSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB231SSZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFMSUB231SSr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFMSUB231SSZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD132SDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD132SDZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD132SDm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD132SDZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD132SDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD132SDZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD132SDr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD132SDZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD132SSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD132SSZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD132SSm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD132SSZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD132SSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD132SSZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD132SSr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD132SSZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD213SDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD213SDZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD213SDm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD213SDZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD213SDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD213SDZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD213SDr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD213SDZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD213SSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD213SSZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD213SSm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD213SSZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD213SSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD213SSZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD213SSr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD213SSZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD231SDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD231SDZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD231SDm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD231SDZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD231SDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD231SDZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD231SDr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD231SDZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD231SSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD231SSZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD231SSm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMADD231SSZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMADD231SSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD231SSZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMADD231SSr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMADD231SSZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB132SDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB132SDZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB132SDm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB132SDZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB132SDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB132SDZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB132SDr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB132SDZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB132SSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB132SSZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB132SSm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB132SSZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB132SSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB132SSZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB132SSr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB132SSZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB213SDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB213SDZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB213SDm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB213SDZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB213SDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB213SDZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB213SDr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB213SDZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB213SSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB213SSZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB213SSm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB213SSZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB213SSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB213SSZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB213SSr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB213SSZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB231SDm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB231SDZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB231SDm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB231SDZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB231SDr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB231SDZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB231SDr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB231SDZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB231SSm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB231SSZm $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB231SSm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm0 = VFNMSUB231SSZm_Int $xmm0, $xmm0, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm0 = VFNMSUB231SSr $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB231SSZr $xmm0, $xmm1, $xmm2
|
|
; CHECK: $xmm0 = VFNMSUB231SSr_Int $xmm0, $xmm1, $xmm2
|
|
$xmm0 = VFNMSUB231SSZr_Int $xmm0, $xmm1, $xmm2
|
|
; CHECK: VPEXTRBmr $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
|
|
VPEXTRBZmr $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
|
|
; CHECK: $eax = VPEXTRBrr $xmm0, 1
|
|
$eax = VPEXTRBZrr $xmm0, 1
|
|
; CHECK: VPEXTRDmr $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
|
|
VPEXTRDZmr $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
|
|
; CHECK: $eax = VPEXTRDrr $xmm0, 1
|
|
$eax = VPEXTRDZrr $xmm0, 1
|
|
; CHECK: VPEXTRQmr $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
|
|
VPEXTRQZmr $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
|
|
; CHECK: $rax = VPEXTRQrr $xmm0, 1
|
|
$rax = VPEXTRQZrr $xmm0, 1
|
|
; CHECK: VPEXTRWmr $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
|
|
VPEXTRWZmr $rdi, 1, $noreg, 0, $noreg, $xmm0, 3
|
|
; CHECK: $eax = VPEXTRWrr $xmm0, 1
|
|
$eax = VPEXTRWZrr $xmm0, 1
|
|
; CHECK: $eax = VPEXTRWrr_REV $xmm0, 1
|
|
$eax = VPEXTRWZrr_REV $xmm0, 1
|
|
; CHECK: $xmm0 = VPINSRBrm $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
|
|
$xmm0 = VPINSRBZrm $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
|
|
; CHECK: $xmm0 = VPINSRBrr $xmm0, $edi, 5
|
|
$xmm0 = VPINSRBZrr $xmm0, $edi, 5
|
|
; CHECK: $xmm0 = VPINSRDrm $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
|
|
$xmm0 = VPINSRDZrm $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
|
|
; CHECK: $xmm0 = VPINSRDrr $xmm0, $edi, 5
|
|
$xmm0 = VPINSRDZrr $xmm0, $edi, 5
|
|
; CHECK: $xmm0 = VPINSRQrm $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
$xmm0 = VPINSRQZrm $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
; CHECK: $xmm0 = VPINSRQrr $xmm0, $rdi, 5
$xmm0 = VPINSRQZrr $xmm0, $rdi, 5
; CHECK: $xmm0 = VPINSRWrm $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
$xmm0 = VPINSRWZrm $xmm0, $rsi, 1, $noreg, 0, $noreg, 3
; CHECK: $xmm0 = VPINSRWrr $xmm0, $edi, 5
$xmm0 = VPINSRWZrr $xmm0, $edi, 5
; CHECK: $xmm0 = VSQRTSDm $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
$xmm0 = VSQRTSDZm $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VSQRTSDm_Int $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
$xmm0 = VSQRTSDZm_Int $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VSQRTSDr $xmm0, $noreg
$xmm0 = VSQRTSDZr $xmm0, $noreg
; CHECK: $xmm0 = VSQRTSDr_Int $xmm0, $noreg
$xmm0 = VSQRTSDZr_Int $xmm0, $noreg
; CHECK: $xmm0 = VSQRTSSm $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
$xmm0 = VSQRTSSZm $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VSQRTSSm_Int $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
$xmm0 = VSQRTSSZm_Int $xmm0, $noreg, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VSQRTSSr $xmm0, $noreg
$xmm0 = VSQRTSSZr $xmm0, $noreg
; CHECK: $xmm0 = VSQRTSSr_Int $xmm0, $noreg
$xmm0 = VSQRTSSZr_Int $xmm0, $noreg
; CHECK: $rdi = VCVTSD2SI64rr_Int $xmm0
$rdi = VCVTSD2SI64Zrr_Int $xmm0
; CHECK: $edi = VCVTSD2SIrr_Int $xmm0
$edi = VCVTSD2SIZrr_Int $xmm0
; CHECK: $xmm0 = VCVTSD2SSrm $xmm0, $rdi, 1, $noreg, 0, $noreg
$xmm0 = VCVTSD2SSZrm $xmm0, $rdi, 1, $noreg, 0, $noreg
; CHECK: $xmm0 = VCVTSD2SSrm_Int $xmm0, $rdi, 1, $noreg, 0, $noreg
$xmm0 = VCVTSD2SSZrm_Int $xmm0, $rdi, 1, $noreg, 0, $noreg
; CHECK: $xmm0 = VCVTSD2SSrr $xmm0, $noreg
$xmm0 = VCVTSD2SSZrr $xmm0, $noreg
; CHECK: $xmm0 = VCVTSD2SSrr_Int $xmm0, $noreg
$xmm0 = VCVTSD2SSZrr_Int $xmm0, $noreg
; CHECK: $xmm0 = VCVTSI2SDrm $xmm0, $rdi, 1, $noreg, 0, $noreg
$xmm0 = VCVTSI2SDZrm $xmm0, $rdi, 1, $noreg, 0, $noreg
; CHECK: $xmm0 = VCVTSI2SDrm_Int $xmm0, $rdi, 1, $noreg, 0, $noreg
$xmm0 = VCVTSI2SDZrm_Int $xmm0, $rdi, 1, $noreg, 0, $noreg
; CHECK: $xmm0 = VCVTSI2SDrr $xmm0, $noreg
$xmm0 = VCVTSI2SDZrr $xmm0, $noreg
; CHECK: $xmm0 = VCVTSI2SDrr_Int $xmm0, $noreg
$xmm0 = VCVTSI2SDZrr_Int $xmm0, $noreg
; CHECK: $xmm0 = VCVTSI2SSrm $xmm0, $rdi, 1, $noreg, 0, $noreg
$xmm0 = VCVTSI2SSZrm $xmm0, $rdi, 1, $noreg, 0, $noreg
; CHECK: $xmm0 = VCVTSI2SSrm_Int $xmm0, $rdi, 1, $noreg, 0, $noreg
$xmm0 = VCVTSI2SSZrm_Int $xmm0, $rdi, 1, $noreg, 0, $noreg
; CHECK: $xmm0 = VCVTSI2SSrr $xmm0, $noreg
$xmm0 = VCVTSI2SSZrr $xmm0, $noreg
; CHECK: $xmm0 = VCVTSI2SSrr_Int $xmm0, $noreg
$xmm0 = VCVTSI2SSZrr_Int $xmm0, $noreg
; CHECK: $xmm0 = VCVTSI642SDrm $xmm0, $rdi, 1, $noreg, 0, $noreg
$xmm0 = VCVTSI642SDZrm $xmm0, $rdi, 1, $noreg, 0, $noreg
; CHECK: $xmm0 = VCVTSI642SDrm_Int $xmm0, $rdi, 1, $noreg, 0, $noreg
$xmm0 = VCVTSI642SDZrm_Int $xmm0, $rdi, 1, $noreg, 0, $noreg
; CHECK: $xmm0 = VCVTSI642SDrr $xmm0, $noreg
$xmm0 = VCVTSI642SDZrr $xmm0, $noreg
; CHECK: $xmm0 = VCVTSI642SDrr_Int $xmm0, $noreg
$xmm0 = VCVTSI642SDZrr_Int $xmm0, $noreg
; CHECK: $xmm0 = VCVTSI642SSrm $xmm0, $rdi, 1, $noreg, 0, $noreg
$xmm0 = VCVTSI642SSZrm $xmm0, $rdi, 1, $noreg, 0, $noreg
; CHECK: $xmm0 = VCVTSI642SSrm_Int $xmm0, $rdi, 1, $noreg, 0, $noreg
$xmm0 = VCVTSI642SSZrm_Int $xmm0, $rdi, 1, $noreg, 0, $noreg
; CHECK: $xmm0 = VCVTSI642SSrr $xmm0, $noreg
$xmm0 = VCVTSI642SSZrr $xmm0, $noreg
; CHECK: $xmm0 = VCVTSI642SSrr_Int $xmm0, $noreg
$xmm0 = VCVTSI642SSZrr_Int $xmm0, $noreg
; CHECK: $xmm0 = VCVTSS2SDrm $xmm0, $rdi, 1, $noreg, 0, $noreg
$xmm0 = VCVTSS2SDZrm $xmm0, $rdi, 1, $noreg, 0, $noreg
; CHECK: $xmm0 = VCVTSS2SDrm_Int $xmm0, $rdi, 1, $noreg, 0, $noreg
$xmm0 = VCVTSS2SDZrm_Int $xmm0, $rdi, 1, $noreg, 0, $noreg
; CHECK: $xmm0 = VCVTSS2SDrr $xmm0, $noreg
$xmm0 = VCVTSS2SDZrr $xmm0, $noreg
; CHECK: $xmm0 = VCVTSS2SDrr_Int $xmm0, $noreg
$xmm0 = VCVTSS2SDZrr_Int $xmm0, $noreg
; CHECK: $rdi = VCVTSS2SI64rm_Int $rdi, $xmm0, 1, $noreg, 0
$rdi = VCVTSS2SI64Zrm_Int $rdi, $xmm0, 1, $noreg, 0
; CHECK: $rdi = VCVTSS2SI64rr_Int $xmm0
$rdi = VCVTSS2SI64Zrr_Int $xmm0
; CHECK: $edi = VCVTSS2SIrm_Int $rdi, $xmm0, 1, $noreg, 0
$edi = VCVTSS2SIZrm_Int $rdi, $xmm0, 1, $noreg, 0
; CHECK: $edi = VCVTSS2SIrr_Int $xmm0
$edi = VCVTSS2SIZrr_Int $xmm0
; CHECK: $rdi = VCVTTSD2SI64rm $rdi, $xmm0, 1, $noreg, 0
$rdi = VCVTTSD2SI64Zrm $rdi, $xmm0, 1, $noreg, 0
; CHECK: $rdi = VCVTTSD2SI64rm_Int $rdi, $xmm0, 1, $noreg, 0
$rdi = VCVTTSD2SI64Zrm_Int $rdi, $xmm0, 1, $noreg, 0
; CHECK: $rdi = VCVTTSD2SI64rr $xmm0
$rdi = VCVTTSD2SI64Zrr $xmm0
; CHECK: $rdi = VCVTTSD2SI64rr_Int $xmm0
$rdi = VCVTTSD2SI64Zrr_Int $xmm0
; CHECK: $edi = VCVTTSD2SIrm $rdi, $xmm0, 1, $noreg, 0
$edi = VCVTTSD2SIZrm $rdi, $xmm0, 1, $noreg, 0
; CHECK: $edi = VCVTTSD2SIrm_Int $rdi, $xmm0, 1, $noreg, 0
$edi = VCVTTSD2SIZrm_Int $rdi, $xmm0, 1, $noreg, 0
; CHECK: $edi = VCVTTSD2SIrr $xmm0
$edi = VCVTTSD2SIZrr $xmm0
; CHECK: $edi = VCVTTSD2SIrr_Int $xmm0
$edi = VCVTTSD2SIZrr_Int $xmm0
; CHECK: $rdi = VCVTTSS2SI64rm $rdi, $xmm0, 1, $noreg, 0
$rdi = VCVTTSS2SI64Zrm $rdi, $xmm0, 1, $noreg, 0
; CHECK: $rdi = VCVTTSS2SI64rm_Int $rdi, $xmm0, 1, $noreg, 0
$rdi = VCVTTSS2SI64Zrm_Int $rdi, $xmm0, 1, $noreg, 0
; CHECK: $rdi = VCVTTSS2SI64rr $xmm0
$rdi = VCVTTSS2SI64Zrr $xmm0
; CHECK: $rdi = VCVTTSS2SI64rr_Int $xmm0
$rdi = VCVTTSS2SI64Zrr_Int $xmm0
; CHECK: $edi = VCVTTSS2SIrm $rdi, $xmm0, 1, $noreg, 0
$edi = VCVTTSS2SIZrm $rdi, $xmm0, 1, $noreg, 0
; CHECK: $edi = VCVTTSS2SIrm_Int $rdi, $xmm0, 1, $noreg, 0
$edi = VCVTTSS2SIZrm_Int $rdi, $xmm0, 1, $noreg, 0
; CHECK: $edi = VCVTTSS2SIrr $xmm0
$edi = VCVTTSS2SIZrr $xmm0
; CHECK: $edi = VCVTTSS2SIrr_Int $xmm0
$edi = VCVTTSS2SIZrr_Int $xmm0
; CHECK: $xmm0 = VMOV64toSDrr $rdi
$xmm0 = VMOV64toSDZrr $rdi
; CHECK: $xmm0 = VMOVDI2SSrm $rip, $noreg, $noreg, $noreg, $noreg
$xmm0 = VMOVDI2SSZrm $rip, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VMOVDI2SSrr $eax
$xmm0 = VMOVDI2SSZrr $eax
; CHECK: VMOVSDmr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
VMOVSDZmr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VMOVSDrm $rip, $noreg, $noreg, $noreg, $noreg
$xmm0 = VMOVSDZrm $rip, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VMOVSDrr $xmm0, $noreg
$xmm0 = VMOVSDZrr $xmm0, $noreg
; CHECK: $xmm0 = VMOVSDrr_REV $xmm0, $noreg
$xmm0 = VMOVSDZrr_REV $xmm0, $noreg
; CHECK: $rax = VMOVSDto64rr $xmm0
$rax = VMOVSDto64Zrr $xmm0
; CHECK: VMOVSDto64mr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
VMOVSDto64Zmr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
; CHECK: VMOVSSmr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
VMOVSSZmr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VMOVSSrm $rip, $noreg, $noreg, $noreg, $noreg
$xmm0 = VMOVSSZrm $rip, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VMOVSSrr $xmm0, $noreg
$xmm0 = VMOVSSZrr $xmm0, $noreg
; CHECK: $xmm0 = VMOVSSrr_REV $xmm0, $noreg
$xmm0 = VMOVSSZrr_REV $xmm0, $noreg
; CHECK: VMOVSS2DImr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
VMOVSS2DIZmr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
; CHECK: $eax = VMOVSS2DIrr $xmm0
$eax = VMOVSS2DIZrr $xmm0
; CHECK: $xmm0 = VMOV64toPQIrr $rdi
$xmm0 = VMOV64toPQIZrr $rdi
; CHECK: $xmm0 = VMOV64toPQIrm $rdi, $noreg, $noreg, $noreg, $noreg
$xmm0 = VMOV64toPQIZrm $rdi, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VMOV64toSDrr $rdi
$xmm0 = VMOV64toSDZrr $rdi
; CHECK: $xmm0 = VMOVDI2PDIrm $rip, $noreg, $noreg, $noreg, $noreg
$xmm0 = VMOVDI2PDIZrm $rip, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VMOVDI2PDIrr $edi
$xmm0 = VMOVDI2PDIZrr $edi
; CHECK: $xmm0 = VMOVLHPSrr $xmm0, $noreg
$xmm0 = VMOVLHPSZrr $xmm0, $noreg
; CHECK: $xmm0 = VMOVHLPSrr $xmm0, $noreg
$xmm0 = VMOVHLPSZrr $xmm0, $noreg
; CHECK: VMOVPDI2DImr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
VMOVPDI2DIZmr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
; CHECK: $edi = VMOVPDI2DIrr $xmm0
$edi = VMOVPDI2DIZrr $xmm0
; CHECK: $xmm0 = VMOVPQI2QIrr $xmm0
$xmm0 = VMOVPQI2QIZrr $xmm0
; CHECK: VMOVPQI2QImr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
VMOVPQI2QIZmr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
; CHECK: $rdi = VMOVPQIto64rr $xmm0
$rdi = VMOVPQIto64Zrr $xmm0
; CHECK: VMOVPQIto64mr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
VMOVPQIto64Zmr $rdi, $xmm0, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VMOVQI2PQIrm $rip, $noreg, $noreg, $noreg, $noreg
$xmm0 = VMOVQI2PQIZrm $rip, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VMOVZPQILo2PQIrr $xmm0
$xmm0 = VMOVZPQILo2PQIZrr $xmm0
; CHECK: VCOMISDrm_Int $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
VCOMISDZrm_Int $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
; CHECK: VCOMISDrr_Int $xmm0, $xmm1, implicit-def $eflags
VCOMISDZrr_Int $xmm0, $xmm1, implicit-def $eflags
; CHECK: VCOMISSrm_Int $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
VCOMISSZrm_Int $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
; CHECK: VCOMISSrr_Int $xmm0, $xmm1, implicit-def $eflags
VCOMISSZrr_Int $xmm0, $xmm1, implicit-def $eflags
; CHECK: VUCOMISDrm_Int $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
VUCOMISDZrm_Int $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
; CHECK: VUCOMISDrr_Int $xmm0, $xmm1, implicit-def $eflags
VUCOMISDZrr_Int $xmm0, $xmm1, implicit-def $eflags
; CHECK: VUCOMISSrm_Int $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
VUCOMISSZrm_Int $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
; CHECK: VUCOMISSrr_Int $xmm0, $xmm1, implicit-def $eflags
VUCOMISSZrr_Int $xmm0, $xmm1, implicit-def $eflags
; CHECK: VCOMISDrm $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
VCOMISDZrm $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
; CHECK: VCOMISDrr $xmm0, $xmm1, implicit-def $eflags
VCOMISDZrr $xmm0, $xmm1, implicit-def $eflags
; CHECK: VCOMISSrm $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
VCOMISSZrm $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
; CHECK: VCOMISSrr $xmm0, $xmm1, implicit-def $eflags
VCOMISSZrr $xmm0, $xmm1, implicit-def $eflags
; CHECK: VUCOMISDrm $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
VUCOMISDZrm $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
; CHECK: VUCOMISDrr $xmm0, $xmm1, implicit-def $eflags
VUCOMISDZrr $xmm0, $xmm1, implicit-def $eflags
; CHECK: VUCOMISSrm $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
VUCOMISSZrm $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
; CHECK: VUCOMISSrr $xmm0, $xmm1, implicit-def $eflags
VUCOMISSZrr $xmm0, $xmm1, implicit-def $eflags
; CHECK: VEXTRACTPSmr $rdi, 1, $noreg, 0, $noreg, $xmm0, $noreg
VEXTRACTPSZmr $rdi, 1, $noreg, 0, $noreg, $xmm0, $noreg
; CHECK: $eax = VEXTRACTPSrr $xmm0, $noreg
$eax = VEXTRACTPSZrr $xmm0, $noreg
; CHECK: $xmm0 = VINSERTPSrm $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
$xmm0 = VINSERTPSZrm $xmm0, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
; CHECK: $xmm0 = VINSERTPSrr $xmm0, $xmm0, $noreg
$xmm0 = VINSERTPSZrr $xmm0, $xmm0, $noreg
RET 0, $zmm0, $zmm1
...
---
# CHECK-LABEL: name: evex_z256_to_evex_test
# CHECK: bb.0:
name: evex_z256_to_evex_test
body: |
bb.0:
; CHECK: VMOVAPDZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVAPDZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: $ymm16 = VMOVAPDZ256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVAPDZ256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VMOVAPDZ256rr $ymm16
$ymm16 = VMOVAPDZ256rr $ymm16
; CHECK: $ymm16 = VMOVAPDZ256rr_REV $ymm16
$ymm16 = VMOVAPDZ256rr_REV $ymm16
; CHECK: VMOVAPSZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVAPSZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: $ymm16 = VMOVAPSZ256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVAPSZ256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VMOVAPSZ256rr $ymm16
$ymm16 = VMOVAPSZ256rr $ymm16
; CHECK: $ymm16 = VMOVAPSZ256rr_REV $ymm16
$ymm16 = VMOVAPSZ256rr_REV $ymm16
; CHECK: $ymm16 = VMOVDDUPZ256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVDDUPZ256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VMOVDDUPZ256rr $ymm16
$ymm16 = VMOVDDUPZ256rr $ymm16
; CHECK: VMOVDQA32Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVDQA32Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: $ymm16 = VMOVDQA32Z256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVDQA32Z256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VMOVDQA32Z256rr $ymm16
$ymm16 = VMOVDQA32Z256rr $ymm16
; CHECK: $ymm16 = VMOVDQA32Z256rr_REV $ymm16
$ymm16 = VMOVDQA32Z256rr_REV $ymm16
; CHECK: VMOVDQA64Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVDQA64Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: $ymm16 = VMOVDQA64Z256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVDQA64Z256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VMOVDQA64Z256rr $ymm16
$ymm16 = VMOVDQA64Z256rr $ymm16
; CHECK: $ymm16 = VMOVDQA64Z256rr_REV $ymm16
$ymm16 = VMOVDQA64Z256rr_REV $ymm16
; CHECK: VMOVDQU16Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVDQU16Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: $ymm16 = VMOVDQU16Z256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVDQU16Z256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VMOVDQU16Z256rr $ymm16
$ymm16 = VMOVDQU16Z256rr $ymm16
; CHECK: $ymm16 = VMOVDQU16Z256rr_REV $ymm16
$ymm16 = VMOVDQU16Z256rr_REV $ymm16
; CHECK: VMOVDQU32Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVDQU32Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: $ymm16 = VMOVDQU32Z256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVDQU32Z256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VMOVDQU32Z256rr $ymm16
$ymm16 = VMOVDQU32Z256rr $ymm16
; CHECK: $ymm16 = VMOVDQU32Z256rr_REV $ymm16
$ymm16 = VMOVDQU32Z256rr_REV $ymm16
; CHECK: VMOVDQU64Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVDQU64Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: $ymm16 = VMOVDQU64Z256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVDQU64Z256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VMOVDQU64Z256rr $ymm16
$ymm16 = VMOVDQU64Z256rr $ymm16
; CHECK: $ymm16 = VMOVDQU64Z256rr_REV $ymm16
$ymm16 = VMOVDQU64Z256rr_REV $ymm16
; CHECK: VMOVDQU8Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVDQU8Z256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: $ymm16 = VMOVDQU8Z256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVDQU8Z256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VMOVDQU8Z256rr $ymm16
$ymm16 = VMOVDQU8Z256rr $ymm16
; CHECK: $ymm16 = VMOVDQU8Z256rr_REV $ymm16
$ymm16 = VMOVDQU8Z256rr_REV $ymm16
; CHECK: $ymm16 = VMOVNTDQAZ256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVNTDQAZ256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: VMOVNTDQZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVNTDQZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: VMOVNTPDZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVNTPDZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: VMOVNTPSZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVNTPSZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: $ymm16 = VMOVSHDUPZ256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVSHDUPZ256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VMOVSHDUPZ256rr $ymm16
$ymm16 = VMOVSHDUPZ256rr $ymm16
; CHECK: $ymm16 = VMOVSLDUPZ256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVSLDUPZ256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VMOVSLDUPZ256rr $ymm16
$ymm16 = VMOVSLDUPZ256rr $ymm16
; CHECK: VMOVUPDZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVUPDZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: $ymm16 = VMOVUPDZ256rm $rip, 1, $noreg, $rax, $noreg
$ymm16 = VMOVUPDZ256rm $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VMOVUPDZ256rr $ymm16
$ymm16 = VMOVUPDZ256rr $ymm16
; CHECK: $ymm16 = VMOVUPDZ256rr_REV $ymm16
$ymm16 = VMOVUPDZ256rr_REV $ymm16
; CHECK: VMOVUPSZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
VMOVUPSZ256mr $rdi, 1, $noreg, 0, $noreg, $ymm16
; CHECK: $ymm16 = VPANDDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
$ymm16 = VPANDDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VPANDDZ256rr $ymm16, $ymm1
$ymm16 = VPANDDZ256rr $ymm16, $ymm1
; CHECK: $ymm16 = VPANDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
$ymm16 = VPANDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VPANDQZ256rr $ymm16, $ymm1
$ymm16 = VPANDQZ256rr $ymm16, $ymm1
; CHECK: $ymm16 = VPANDNDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
$ymm16 = VPANDNDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VPANDNDZ256rr $ymm16, $ymm1
$ymm16 = VPANDNDZ256rr $ymm16, $ymm1
; CHECK: $ymm16 = VPANDNQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
$ymm16 = VPANDNQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VPANDNQZ256rr $ymm16, $ymm1
$ymm16 = VPANDNQZ256rr $ymm16, $ymm1
; CHECK: $ymm16 = VPAVGBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
$ymm16 = VPAVGBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
; CHECK: $ymm16 = VPAVGBZ256rr $ymm16, $ymm1
$ymm16 = VPAVGBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPAVGWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPAVGWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPAVGWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPAVGWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPADDBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPADDBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPADDBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPADDBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPADDDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPADDDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPADDDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPADDDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPADDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPADDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPADDQZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPADDQZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPADDSBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPADDSBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPADDSBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPADDSBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPADDSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPADDSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPADDSWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPADDSWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPADDUSBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPADDUSBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPADDUSBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPADDUSBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPADDUSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPADDUSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPADDUSWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPADDUSWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPADDWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPADDWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPADDWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPADDWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VMULPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VMULPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VMULPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VMULPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VMULPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VMULPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VMULPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VMULPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VORPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VORPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VORPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VORPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VORPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VORPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VORPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VORPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMADDUBSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMADDUBSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMADDUBSWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMADDUBSWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMADDWDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMADDWDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMADDWDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMADDWDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMAXSBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMAXSBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMAXSBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMAXSBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMAXSDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMAXSDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMAXSDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMAXSDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMAXSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMAXSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMAXSWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMAXSWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMAXUBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMAXUBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMAXUBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMAXUBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMAXUDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMAXUDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMAXUDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMAXUDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMAXUWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMAXUWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMAXUWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMAXUWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMINSBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMINSBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMINSBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMINSBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMINSDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMINSDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMINSDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMINSDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMINSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMINSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMINSWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMINSWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMINUBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMINUBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMINUBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMINUBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMINUDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMINUDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMINUDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMINUDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMINUWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMINUWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMINUWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMINUWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMULDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMULDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMULDQZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMULDQZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMULHRSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMULHRSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMULHRSWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMULHRSWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMULHUWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMULHUWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMULHUWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMULHUWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMULHWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMULHWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMULHWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMULHWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMULLDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMULLDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMULLDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMULLDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMULLWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMULLWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMULLWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMULLWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPMULUDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMULUDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMULUDQZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPMULUDQZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPORDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPORDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPORDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPORDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPORQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPORQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPORQZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPORQZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSUBBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSUBBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSUBBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSUBBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSUBDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSUBDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSUBDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSUBDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSUBQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSUBQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSUBQZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSUBQZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSUBSBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSUBSBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSUBSBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSUBSBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSUBSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSUBSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSUBSWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSUBSWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSUBUSBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSUBUSBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSUBUSBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSUBUSBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSUBUSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSUBUSWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSUBUSWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSUBUSWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSUBWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSUBWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSUBWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSUBWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPXORDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPXORDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPXORDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPXORDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPXORQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPXORQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPXORQZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPXORQZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VADDPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VADDPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VADDPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VADDPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VADDPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VADDPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VADDPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VADDPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VANDNPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VANDNPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VANDNPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VANDNPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VANDNPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VANDNPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VANDNPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VANDNPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VANDPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VANDPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VANDPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VANDPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VANDPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VANDPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VANDPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VANDPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VDIVPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VDIVPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VDIVPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VDIVPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VDIVPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VDIVPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VDIVPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VDIVPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VMAXCPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VMAXCPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VMAXCPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VMAXCPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VMAXCPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VMAXCPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VMAXCPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VMAXCPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VMAXPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VMAXPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VMAXPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VMAXPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VMAXPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VMAXPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VMAXPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VMAXPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VMINCPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VMINCPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VMINCPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VMINCPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VMINCPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VMINCPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VMINCPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VMINCPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VMINPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VMINPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VMINPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VMINPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VMINPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VMINPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VMINPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VMINPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VXORPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VXORPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VXORPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VXORPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VXORPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VXORPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VXORPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VXORPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPACKSSDWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPACKSSDWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPACKSSDWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPACKSSDWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPACKSSWBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPACKSSWBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPACKSSWBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPACKSSWBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPACKUSDWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPACKUSDWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPACKUSDWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPACKUSDWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPACKUSWBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPACKUSWBZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPACKUSWBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPACKUSWBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VUNPCKHPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VUNPCKHPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VUNPCKHPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VUNPCKHPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VUNPCKHPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VUNPCKHPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VUNPCKHPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VUNPCKHPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VUNPCKLPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VUNPCKLPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VUNPCKLPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VUNPCKLPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VUNPCKLPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VUNPCKLPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VUNPCKLPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VUNPCKLPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VSUBPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VSUBPDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VSUBPDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VSUBPDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VSUBPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VSUBPSZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VSUBPSZ256rr $ymm16, $ymm1
|
|
$ymm16 = VSUBPSZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPUNPCKHBWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPUNPCKHBWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPUNPCKHBWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPUNPCKHBWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPUNPCKHDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPUNPCKHDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPUNPCKHDQZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPUNPCKHDQZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPUNPCKHQDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPUNPCKHQDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPUNPCKHQDQZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPUNPCKHQDQZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPUNPCKHWDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPUNPCKHWDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPUNPCKHWDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPUNPCKHWDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPUNPCKLBWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPUNPCKLBWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPUNPCKLBWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPUNPCKLBWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPUNPCKLDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPUNPCKLDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPUNPCKLDQZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPUNPCKLDQZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPUNPCKLQDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPUNPCKLQDQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPUNPCKLQDQZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPUNPCKLQDQZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPUNPCKLWDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPUNPCKLWDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPUNPCKLWDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPUNPCKLWDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VFMADD132PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMADD132PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMADD132PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMADD132PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMADD132PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMADD132PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMADD132PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMADD132PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMADD213PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMADD213PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMADD213PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMADD213PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMADD213PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMADD213PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMADD213PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMADD213PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMADD231PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMADD231PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMADD231PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMADD231PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMADD231PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMADD231PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMADD231PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMADD231PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMADDSUB132PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMADDSUB132PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMADDSUB132PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMADDSUB132PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMADDSUB132PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMADDSUB132PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMADDSUB132PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMADDSUB132PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMADDSUB213PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMADDSUB213PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMADDSUB213PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMADDSUB213PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMADDSUB213PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMADDSUB213PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMADDSUB213PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMADDSUB213PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMADDSUB231PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMADDSUB231PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMADDSUB231PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMADDSUB231PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMADDSUB231PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMADDSUB231PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMADDSUB231PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMADDSUB231PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMSUB132PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMSUB132PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMSUB132PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMSUB132PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMSUB132PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMSUB132PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMSUB132PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMSUB132PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMSUB213PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMSUB213PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMSUB213PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMSUB213PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMSUB213PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMSUB213PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMSUB213PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMSUB213PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMSUB231PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMSUB231PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMSUB231PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMSUB231PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMSUB231PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMSUB231PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMSUB231PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMSUB231PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMSUBADD132PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMSUBADD132PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMSUBADD132PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMSUBADD132PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMSUBADD132PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMSUBADD132PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMSUBADD132PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMSUBADD132PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMSUBADD213PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMSUBADD213PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMSUBADD213PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMSUBADD213PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMSUBADD213PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMSUBADD213PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMSUBADD213PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMSUBADD213PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMSUBADD231PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMSUBADD231PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMSUBADD231PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMSUBADD231PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFMSUBADD231PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFMSUBADD231PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFMSUBADD231PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFMSUBADD231PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFNMADD132PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFNMADD132PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFNMADD132PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFNMADD132PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFNMADD132PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFNMADD132PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFNMADD132PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFNMADD132PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFNMADD213PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFNMADD213PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFNMADD213PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFNMADD213PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFNMADD213PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFNMADD213PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFNMADD213PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFNMADD213PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFNMADD231PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFNMADD231PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFNMADD231PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFNMADD231PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFNMADD231PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFNMADD231PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFNMADD231PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFNMADD231PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFNMSUB132PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFNMSUB132PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFNMSUB132PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFNMSUB132PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFNMSUB132PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFNMSUB132PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFNMSUB132PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFNMSUB132PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFNMSUB213PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFNMSUB213PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFNMSUB213PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFNMSUB213PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFNMSUB213PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFNMSUB213PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFNMSUB213PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFNMSUB213PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFNMSUB231PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFNMSUB231PDZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFNMSUB231PDZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFNMSUB231PDZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VFNMSUB231PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VFNMSUB231PSZ256m $ymm16, $ymm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VFNMSUB231PSZ256r $ymm16, $ymm1, $ymm2
|
|
$ymm16 = VFNMSUB231PSZ256r $ymm16, $ymm1, $ymm2
|
|
; CHECK: $ymm16 = VPSRADZ256ri $ymm16, 7
|
|
$ymm16 = VPSRADZ256ri $ymm16, 7
|
|
; CHECK: $ymm16 = VPSRADZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSRADZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSRADZ256rr $ymm16, $xmm1
|
|
$ymm16 = VPSRADZ256rr $ymm16, $xmm1
|
|
; CHECK: $ymm16 = VPSRAVDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSRAVDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSRAVDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSRAVDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSRAWZ256ri $ymm16, 7
|
|
$ymm16 = VPSRAWZ256ri $ymm16, 7
|
|
; CHECK: $ymm16 = VPSRAWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSRAWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSRAWZ256rr $ymm16, $xmm1
|
|
$ymm16 = VPSRAWZ256rr $ymm16, $xmm1
|
|
; CHECK: $ymm16 = VPSRLDQZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSRLDQZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSRLDZ256ri $ymm16, 7
|
|
$ymm16 = VPSRLDZ256ri $ymm16, 7
|
|
; CHECK: $ymm16 = VPSRLDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSRLDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSRLDZ256rr $ymm16, $xmm1
|
|
$ymm16 = VPSRLDZ256rr $ymm16, $xmm1
|
|
; CHECK: $ymm16 = VPSRLQZ256ri $ymm16, 7
|
|
$ymm16 = VPSRLQZ256ri $ymm16, 7
|
|
; CHECK: $ymm16 = VPSRLQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSRLQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSRLQZ256rr $ymm16, $xmm1
|
|
$ymm16 = VPSRLQZ256rr $ymm16, $xmm1
|
|
; CHECK: $ymm16 = VPSRLVDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSRLVDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSRLVDZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSRLVDZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSRLVQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSRLVQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSRLVQZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSRLVQZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSRLWZ256ri $ymm16, 7
|
|
$ymm16 = VPSRLWZ256ri $ymm16, 7
|
|
; CHECK: $ymm16 = VPSRLWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSRLWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSRLWZ256rr $ymm16, $xmm1
|
|
$ymm16 = VPSRLWZ256rr $ymm16, $xmm1
|
|
; CHECK: $ymm16 = VPMOVSXBDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMOVSXBDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMOVSXBDZ256rr $xmm0
|
|
$ymm16 = VPMOVSXBDZ256rr $xmm0
|
|
; CHECK: $ymm16 = VPMOVSXBQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMOVSXBQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMOVSXBQZ256rr $xmm0
|
|
$ymm16 = VPMOVSXBQZ256rr $xmm0
|
|
; CHECK: $ymm16 = VPMOVSXBWZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMOVSXBWZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMOVSXBWZ256rr $xmm0
|
|
$ymm16 = VPMOVSXBWZ256rr $xmm0
|
|
; CHECK: $ymm16 = VPMOVSXDQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMOVSXDQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMOVSXDQZ256rr $xmm0
|
|
$ymm16 = VPMOVSXDQZ256rr $xmm0
|
|
; CHECK: $ymm16 = VPMOVSXWDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMOVSXWDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMOVSXWDZ256rr $xmm0
|
|
$ymm16 = VPMOVSXWDZ256rr $xmm0
|
|
; CHECK: $ymm16 = VPMOVSXWQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMOVSXWQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMOVSXWQZ256rr $xmm0
|
|
$ymm16 = VPMOVSXWQZ256rr $xmm0
|
|
; CHECK: $ymm16 = VPMOVZXBDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMOVZXBDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMOVZXBDZ256rr $xmm0
|
|
$ymm16 = VPMOVZXBDZ256rr $xmm0
|
|
; CHECK: $ymm16 = VPMOVZXBQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMOVZXBQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMOVZXBQZ256rr $xmm0
|
|
$ymm16 = VPMOVZXBQZ256rr $xmm0
|
|
; CHECK: $ymm16 = VPMOVZXBWZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMOVZXBWZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMOVZXBWZ256rr $xmm0
|
|
$ymm16 = VPMOVZXBWZ256rr $xmm0
|
|
; CHECK: $ymm16 = VPMOVZXDQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMOVZXDQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMOVZXDQZ256rr $xmm0
|
|
$ymm16 = VPMOVZXDQZ256rr $xmm0
|
|
; CHECK: $ymm16 = VPMOVZXWDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMOVZXWDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMOVZXWDZ256rr $xmm0
|
|
$ymm16 = VPMOVZXWDZ256rr $xmm0
|
|
; CHECK: $ymm16 = VPMOVZXWQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPMOVZXWQZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPMOVZXWQZ256rr $xmm0
|
|
$ymm16 = VPMOVZXWQZ256rr $xmm0
|
|
; CHECK: $ymm16 = VBROADCASTF32X2Z256m $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VBROADCASTF32X2Z256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VBROADCASTF32X2Z256r $xmm16
|
|
$ymm16 = VBROADCASTF32X2Z256r $xmm16
|
|
; CHECK: $ymm16 = VBROADCASTF32X4Z256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VBROADCASTF32X4Z256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VBROADCASTSDZ256m $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VBROADCASTSDZ256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VBROADCASTSDZ256r $xmm0
|
|
$ymm16 = VBROADCASTSDZ256r $xmm0
|
|
; CHECK: $ymm16 = VBROADCASTSSZ256m $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VBROADCASTSSZ256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VBROADCASTSSZ256r $xmm0
|
|
$ymm16 = VBROADCASTSSZ256r $xmm0
|
|
; CHECK: $ymm16 = VPBROADCASTBZ256m $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPBROADCASTBZ256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPBROADCASTBZ256r $xmm0
|
|
$ymm16 = VPBROADCASTBZ256r $xmm0
|
|
; CHECK: $ymm16 = VPBROADCASTDZ256m $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPBROADCASTDZ256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPBROADCASTDZ256r $xmm0
|
|
$ymm16 = VPBROADCASTDZ256r $xmm0
|
|
; CHECK: $ymm16 = VPBROADCASTWZ256m $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPBROADCASTWZ256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPBROADCASTWZ256r $xmm0
|
|
$ymm16 = VPBROADCASTWZ256r $xmm0
|
|
; CHECK: $ymm16 = VBROADCASTI32X4Z256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VBROADCASTI32X4Z256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VBROADCASTI32X2Z256m $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VBROADCASTI32X2Z256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VBROADCASTI32X2Z256r $xmm16
|
|
$ymm16 = VBROADCASTI32X2Z256r $xmm16
|
|
; CHECK: $ymm16 = VPBROADCASTQZ256m $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPBROADCASTQZ256m $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPBROADCASTQZ256r $xmm0
|
|
$ymm16 = VPBROADCASTQZ256r $xmm0
|
|
; CHECK: $ymm16 = VPABSBZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPABSBZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPABSBZ256rr $ymm16
|
|
$ymm16 = VPABSBZ256rr $ymm16
|
|
; CHECK: $ymm16 = VPABSDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPABSDZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPABSDZ256rr $ymm16
|
|
$ymm16 = VPABSDZ256rr $ymm16
|
|
; CHECK: $ymm16 = VPABSWZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPABSWZ256rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPABSWZ256rr $ymm16
|
|
$ymm16 = VPABSWZ256rr $ymm16
|
|
; CHECK: $ymm16 = VPSADBWZ256rm $ymm16, 1, $noreg, $rax, $noreg, $noreg
|
|
$ymm16 = VPSADBWZ256rm $ymm16, 1, $noreg, $rax, $noreg, $noreg
|
|
; CHECK: $ymm16 = VPSADBWZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSADBWZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPERMDZ256rm $ymm16, $rdi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VPERMDZ256rm $ymm16, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VPERMDZ256rr $ymm1, $ymm16
|
|
$ymm16 = VPERMDZ256rr $ymm1, $ymm16
|
|
; CHECK: $ymm16 = VPERMILPDZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm16 = VPERMILPDZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm16 = VPERMILPDZ256ri $ymm16, 7
|
|
$ymm16 = VPERMILPDZ256ri $ymm16, 7
|
|
; CHECK: $ymm16 = VPERMILPDZ256rm $ymm16, $rdi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VPERMILPDZ256rm $ymm16, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VPERMILPDZ256rr $ymm1, $ymm16
|
|
$ymm16 = VPERMILPDZ256rr $ymm1, $ymm16
|
|
; CHECK: $ymm16 = VPERMILPSZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm16 = VPERMILPSZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm16 = VPERMILPSZ256ri $ymm16, 7
|
|
$ymm16 = VPERMILPSZ256ri $ymm16, 7
|
|
; CHECK: $ymm16 = VPERMILPSZ256rm $ymm16, $rdi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VPERMILPSZ256rm $ymm16, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VPERMILPSZ256rr $ymm1, $ymm16
|
|
$ymm16 = VPERMILPSZ256rr $ymm1, $ymm16
|
|
; CHECK: $ymm16 = VPERMPDZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm16 = VPERMPDZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm16 = VPERMPDZ256ri $ymm16, 7
|
|
$ymm16 = VPERMPDZ256ri $ymm16, 7
|
|
; CHECK: $ymm16 = VPERMPSZ256rm $ymm16, $rdi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VPERMPSZ256rm $ymm16, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VPERMPSZ256rr $ymm1, $ymm16
|
|
$ymm16 = VPERMPSZ256rr $ymm1, $ymm16
|
|
; CHECK: $ymm16 = VPERMQZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm16 = VPERMQZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm16 = VPERMQZ256ri $ymm16, 7
|
|
$ymm16 = VPERMQZ256ri $ymm16, 7
|
|
; CHECK: $ymm16 = VPSLLDQZ256rr $ymm16, 14
|
|
$ymm16 = VPSLLDQZ256rr $ymm16, 14
|
|
; CHECK: $ymm16 = VPSLLDZ256ri $ymm16, 7
|
|
$ymm16 = VPSLLDZ256ri $ymm16, 7
|
|
; CHECK: $ymm16 = VPSLLDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSLLDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSLLDZ256rr $ymm16, 14
|
|
$ymm16 = VPSLLDZ256rr $ymm16, 14
|
|
; CHECK: $ymm16 = VPSLLQZ256ri $ymm16, 7
|
|
$ymm16 = VPSLLQZ256ri $ymm16, 7
|
|
; CHECK: $ymm16 = VPSLLQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSLLQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSLLQZ256rr $ymm16, 14
|
|
$ymm16 = VPSLLQZ256rr $ymm16, 14
|
|
; CHECK: $ymm16 = VPSLLVDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSLLVDZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSLLVDZ256rr $ymm16, 14
|
|
$ymm16 = VPSLLVDZ256rr $ymm16, 14
|
|
; CHECK: $ymm16 = VPSLLVQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSLLVQZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSLLVQZ256rr $ymm16, 14
|
|
$ymm16 = VPSLLVQZ256rr $ymm16, 14
|
|
; CHECK: $ymm16 = VPSLLWZ256ri $ymm16, 7
|
|
$ymm16 = VPSLLWZ256ri $ymm16, 7
|
|
; CHECK: $ymm16 = VPSLLWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
$ymm16 = VPSLLWZ256rm $ymm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $ymm16 = VPSLLWZ256rr $ymm16, 14
|
|
$ymm16 = VPSLLWZ256rr $ymm16, 14
|
|
; CHECK: $ymm16 = VCVTDQ2PDZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
$ymm16 = VCVTDQ2PDZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
; CHECK: $ymm16 = VCVTDQ2PDZ256rr $xmm0
|
|
$ymm16 = VCVTDQ2PDZ256rr $xmm0
|
|
; CHECK: $ymm16 = VCVTDQ2PSZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
$ymm16 = VCVTDQ2PSZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
; CHECK: $ymm16 = VCVTDQ2PSZ256rr $ymm16
|
|
$ymm16 = VCVTDQ2PSZ256rr $ymm16
|
|
; CHECK: $xmm0 = VCVTPD2DQZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
$xmm0 = VCVTPD2DQZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTPD2DQZ256rr $ymm16
|
|
$xmm0 = VCVTPD2DQZ256rr $ymm16
|
|
; CHECK: $xmm0 = VCVTPD2PSZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
$xmm0 = VCVTPD2PSZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTPD2PSZ256rr $ymm16
|
|
$xmm0 = VCVTPD2PSZ256rr $ymm16
|
|
; CHECK: $ymm16 = VCVTPS2DQZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
$ymm16 = VCVTPS2DQZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
; CHECK: $ymm16 = VCVTPS2DQZ256rr $ymm16
|
|
$ymm16 = VCVTPS2DQZ256rr $ymm16
|
|
; CHECK: $ymm16 = VCVTPS2PDZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
$ymm16 = VCVTPS2PDZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
; CHECK: $ymm16 = VCVTPS2PDZ256rr $xmm0
|
|
$ymm16 = VCVTPS2PDZ256rr $xmm0
|
|
; CHECK: VCVTPS2PHZ256mr $rdi, $ymm16, 1, $noreg, 0, $noreg, $noreg
|
|
VCVTPS2PHZ256mr $rdi, $ymm16, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm0 = VCVTPS2PHZ256rr $ymm16, $noreg
|
|
$xmm0 = VCVTPS2PHZ256rr $ymm16, $noreg
|
|
; CHECK: $ymm16 = VCVTPH2PSZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
$ymm16 = VCVTPH2PSZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
; CHECK: $ymm16 = VCVTPH2PSZ256rr $xmm0
|
|
$ymm16 = VCVTPH2PSZ256rr $xmm0
|
|
; CHECK: $xmm0 = VCVTTPD2DQZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
$xmm0 = VCVTTPD2DQZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
; CHECK: $xmm0 = VCVTTPD2DQZ256rr $ymm16
|
|
$xmm0 = VCVTTPD2DQZ256rr $ymm16
|
|
; CHECK: $ymm16 = VCVTTPS2DQZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
$ymm16 = VCVTTPS2DQZ256rm $rdi, $ymm16, 1, $noreg, 0
|
|
; CHECK: $ymm16 = VCVTTPS2DQZ256rr $ymm16
|
|
$ymm16 = VCVTTPS2DQZ256rr $ymm16
|
|
; CHECK: $ymm16 = VSQRTPDZ256m $rdi, $noreg, $noreg, $noreg, $noreg
|
|
$ymm16 = VSQRTPDZ256m $rdi, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $ymm16 = VSQRTPDZ256r $ymm16
|
|
$ymm16 = VSQRTPDZ256r $ymm16
|
|
; CHECK: $ymm16 = VSQRTPSZ256m $rdi, $noreg, $noreg, $noreg, $noreg
|
|
$ymm16 = VSQRTPSZ256m $rdi, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $ymm16 = VSQRTPSZ256r $ymm16
|
|
$ymm16 = VSQRTPSZ256r $ymm16
|
|
; CHECK: $ymm16 = VPALIGNRZ256rmi $ymm16, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$ymm16 = VPALIGNRZ256rmi $ymm16, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $ymm16 = VPALIGNRZ256rri $ymm16, $ymm1, $noreg
|
|
$ymm16 = VPALIGNRZ256rri $ymm16, $ymm1, $noreg
|
|
; CHECK: $ymm16 = VMOVUPSZ256rm $rdi, 1, $noreg, 0, $noreg
|
|
$ymm16 = VMOVUPSZ256rm $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $ymm16 = VMOVUPSZ256rr $ymm16
|
|
$ymm16 = VMOVUPSZ256rr $ymm16
|
|
; CHECK: $ymm16 = VMOVUPSZ256rr_REV $ymm16
|
|
$ymm16 = VMOVUPSZ256rr_REV $ymm16
|
|
; CHECK: $ymm16 = VPSHUFBZ256rm $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$ymm16 = VPSHUFBZ256rm $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $ymm16 = VPSHUFBZ256rr $ymm16, $ymm1
|
|
$ymm16 = VPSHUFBZ256rr $ymm16, $ymm1
|
|
; CHECK: $ymm16 = VPSHUFDZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm16 = VPSHUFDZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm16 = VPSHUFDZ256ri $ymm16, -24
|
|
$ymm16 = VPSHUFDZ256ri $ymm16, -24
|
|
; CHECK: $ymm16 = VPSHUFHWZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm16 = VPSHUFHWZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm16 = VPSHUFHWZ256ri $ymm16, -24
|
|
$ymm16 = VPSHUFHWZ256ri $ymm16, -24
|
|
; CHECK: $ymm16 = VPSHUFLWZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$ymm16 = VPSHUFLWZ256mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $ymm16 = VPSHUFLWZ256ri $ymm16, -24
|
|
$ymm16 = VPSHUFLWZ256ri $ymm16, -24
|
|
; CHECK: $ymm16 = VSHUFPDZ256rmi $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$ymm16 = VSHUFPDZ256rmi $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $ymm16 = VSHUFPDZ256rri $ymm16, $noreg, $noreg
|
|
$ymm16 = VSHUFPDZ256rri $ymm16, $noreg, $noreg
|
|
; CHECK: $ymm16 = VSHUFPSZ256rmi $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$ymm16 = VSHUFPSZ256rmi $ymm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $ymm16 = VSHUFPSZ256rri $ymm16, $noreg, $noreg
|
|
$ymm16 = VSHUFPSZ256rri $ymm16, $noreg, $noreg

RET 0, $zmm0, $zmm1
...
---
# CHECK-LABEL: name: evex_z128_to_evex_test
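# Registers xmm16-xmm31 cannot be encoded with a VEX prefix, so these 128-bit
# instructions are expected to keep their EVEX encoding unchanged.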
# CHECK: bb.0:

name: evex_z128_to_evex_test
body: |
bb.0:
; CHECK: VMOVAPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVAPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVAPDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMOVAPDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMOVAPDZ128rr $xmm16
|
|
$xmm16 = VMOVAPDZ128rr $xmm16
|
|
; CHECK: VMOVAPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVAPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVAPSZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMOVAPSZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMOVAPSZ128rr $xmm16
|
|
$xmm16 = VMOVAPSZ128rr $xmm16
|
|
; CHECK: VMOVDQA32Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVDQA32Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVDQA32Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMOVDQA32Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMOVDQA32Z128rr $xmm16
|
|
$xmm16 = VMOVDQA32Z128rr $xmm16
|
|
; CHECK: VMOVDQA64Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVDQA64Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVDQA64Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMOVDQA64Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMOVDQA64Z128rr $xmm16
|
|
$xmm16 = VMOVDQA64Z128rr $xmm16
|
|
; CHECK: VMOVDQU16Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVDQU16Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVDQU16Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMOVDQU16Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMOVDQU16Z128rr $xmm16
|
|
$xmm16 = VMOVDQU16Z128rr $xmm16
|
|
; CHECK: VMOVDQU32Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVDQU32Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVDQU32Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMOVDQU32Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMOVDQU32Z128rr $xmm16
|
|
$xmm16 = VMOVDQU32Z128rr $xmm16
|
|
; CHECK: VMOVDQU64Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVDQU64Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVDQU64Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMOVDQU64Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMOVDQU64Z128rr $xmm16
|
|
$xmm16 = VMOVDQU64Z128rr $xmm16
|
|
; CHECK: VMOVDQU8Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVDQU8Z128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVDQU8Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMOVDQU8Z128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMOVDQU8Z128rr $xmm16
|
|
$xmm16 = VMOVDQU8Z128rr $xmm16
|
|
; CHECK: $xmm16 = VMOVDQU8Z128rr_REV $xmm16
|
|
$xmm16 = VMOVDQU8Z128rr_REV $xmm16
|
|
; CHECK: $xmm16 = VMOVNTDQAZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMOVNTDQAZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: VMOVUPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVUPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVUPDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMOVUPDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMOVUPDZ128rr $xmm16
|
|
$xmm16 = VMOVUPDZ128rr $xmm16
|
|
; CHECK: $xmm16 = VMOVUPDZ128rr_REV $xmm16
|
|
$xmm16 = VMOVUPDZ128rr_REV $xmm16
|
|
; CHECK: VMOVUPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVUPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVUPSZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMOVUPSZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMOVUPSZ128rr $xmm16
|
|
$xmm16 = VMOVUPSZ128rr $xmm16
|
|
; CHECK: $xmm16 = VMOVUPSZ128rr_REV $xmm16
|
|
$xmm16 = VMOVUPSZ128rr_REV $xmm16
|
|
; CHECK: VMOVNTDQZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVNTDQZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: VMOVNTPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVNTPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: VMOVNTPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVNTPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVAPDZ128rr_REV $xmm16
|
|
$xmm16 = VMOVAPDZ128rr_REV $xmm16
|
|
; CHECK: $xmm16 = VMOVAPSZ128rr_REV $xmm16
|
|
$xmm16 = VMOVAPSZ128rr_REV $xmm16
|
|
; CHECK: $xmm16 = VMOVDQA32Z128rr_REV $xmm16
|
|
$xmm16 = VMOVDQA32Z128rr_REV $xmm16
|
|
; CHECK: $xmm16 = VMOVDQA64Z128rr_REV $xmm16
|
|
$xmm16 = VMOVDQA64Z128rr_REV $xmm16
|
|
; CHECK: $xmm16 = VMOVDQU16Z128rr_REV $xmm16
|
|
$xmm16 = VMOVDQU16Z128rr_REV $xmm16
|
|
; CHECK: $xmm16 = VMOVDQU32Z128rr_REV $xmm16
|
|
$xmm16 = VMOVDQU32Z128rr_REV $xmm16
|
|
; CHECK: $xmm16 = VMOVDQU64Z128rr_REV $xmm16
|
|
$xmm16 = VMOVDQU64Z128rr_REV $xmm16
|
|
; CHECK: $xmm16 = VPMOVSXBDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMOVSXBDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMOVSXBDZ128rr $xmm16
|
|
$xmm16 = VPMOVSXBDZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPMOVSXBQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMOVSXBQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMOVSXBQZ128rr $xmm16
|
|
$xmm16 = VPMOVSXBQZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPMOVSXBWZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMOVSXBWZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMOVSXBWZ128rr $xmm16
|
|
$xmm16 = VPMOVSXBWZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPMOVSXDQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMOVSXDQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMOVSXDQZ128rr $xmm16
|
|
$xmm16 = VPMOVSXDQZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPMOVSXWDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMOVSXWDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMOVSXWDZ128rr $xmm16
|
|
$xmm16 = VPMOVSXWDZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPMOVSXWQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMOVSXWQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMOVSXWQZ128rr $xmm16
|
|
$xmm16 = VPMOVSXWQZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPMOVZXBDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMOVZXBDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMOVZXBDZ128rr $xmm16
|
|
$xmm16 = VPMOVZXBDZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPMOVZXBQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMOVZXBQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMOVZXBQZ128rr $xmm16
|
|
$xmm16 = VPMOVZXBQZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPMOVZXBWZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMOVZXBWZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMOVZXBWZ128rr $xmm16
|
|
$xmm16 = VPMOVZXBWZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPMOVZXDQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMOVZXDQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMOVZXDQZ128rr $xmm16
|
|
$xmm16 = VPMOVZXDQZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPMOVZXWDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMOVZXWDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMOVZXWDZ128rr $xmm16
|
|
$xmm16 = VPMOVZXWDZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPMOVZXWQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMOVZXWQZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMOVZXWQZ128rr $xmm16
|
|
$xmm16 = VPMOVZXWQZ128rr $xmm16
|
|
; CHECK: VMOVHPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVHPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVHPDZ128rm $xmm16, $rdi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VMOVHPDZ128rm $xmm16, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: VMOVHPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVHPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVHPSZ128rm $xmm16, $rdi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VMOVHPSZ128rm $xmm16, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: VMOVLPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVLPDZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVLPDZ128rm $xmm16, $rdi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VMOVLPDZ128rm $xmm16, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: VMOVLPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
VMOVLPSZ128mr $rdi, 1, $noreg, 0, $noreg, $xmm16
|
|
; CHECK: $xmm16 = VMOVLPSZ128rm $xmm16, $rdi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VMOVLPSZ128rm $xmm16, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VMAXCPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMAXCPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMAXCPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VMAXCPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VMAXCPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMAXCPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMAXCPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VMAXCPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VMAXPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMAXPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMAXPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VMAXPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VMAXPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMAXPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMAXPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VMAXPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VMINCPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMINCPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMINCPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VMINCPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VMINCPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMINCPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMINCPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VMINCPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VMINPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMINPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMINPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VMINPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VMINPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMINPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMINPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VMINPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VMULPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMULPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMULPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VMULPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VMULPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VMULPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VMULPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VMULPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VORPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VORPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VORPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VORPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VORPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VORPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VORPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VORPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPADDBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPADDBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPADDBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPADDBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPADDDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPADDDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPADDDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPADDDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPADDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPADDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPADDQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPADDQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPADDSBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPADDSBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPADDSBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPADDSBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPADDSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPADDSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPADDSWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPADDSWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPADDUSBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPADDUSBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPADDUSBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPADDUSBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPADDUSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPADDUSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPADDUSWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPADDUSWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPADDWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPADDWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPADDWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPADDWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPANDDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPANDDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPANDDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPANDDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPANDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPANDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPANDQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPANDQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPANDNDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPANDNDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPANDNDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPANDNDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPANDNQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPANDNQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPANDNQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPANDNQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPAVGBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPAVGBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPAVGBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPAVGBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPAVGWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPAVGWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPAVGWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPAVGWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMAXSBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMAXSBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMAXSBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMAXSBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMAXSDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMAXSDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMAXSDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMAXSDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMAXSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMAXSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMAXSWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMAXSWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMAXUBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMAXUBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMAXUBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMAXUBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMAXUDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMAXUDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMAXUDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMAXUDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMAXUWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMAXUWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMAXUWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMAXUWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMINSBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMINSBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMINSBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMINSBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMINSDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMINSDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMINSDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMINSDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMINSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMINSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMINSWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMINSWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMINUBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMINUBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMINUBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMINUBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMINUDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMINUDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMINUDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMINUDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMINUWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMINUWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMINUWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMINUWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMULDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMULDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMULDQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMULDQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMULHRSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMULHRSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMULHRSWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMULHRSWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMULHUWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMULHUWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMULHUWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMULHUWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMULHWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMULHWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMULHWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMULHWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMULLDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMULLDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMULLDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMULLDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMULLWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMULLWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMULLWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMULLWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMULUDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMULUDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMULUDQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMULUDQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPORDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPORDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPORDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPORDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPORQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPORQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPORQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPORQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPSUBBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSUBBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSUBBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPSUBBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPSUBDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSUBDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSUBDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPSUBDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPSUBQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSUBQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSUBQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPSUBQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPSUBSBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSUBSBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSUBSBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPSUBSBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPSUBSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSUBSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSUBSWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPSUBSWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPSUBUSBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSUBUSBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSUBUSBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPSUBUSBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPSUBUSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSUBUSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSUBUSWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPSUBUSWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPSUBWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSUBWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSUBWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPSUBWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VADDPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VADDPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VADDPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VADDPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VADDPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VADDPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VADDPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VADDPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VANDNPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VANDNPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VANDNPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VANDNPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VANDNPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VANDNPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VANDNPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VANDNPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VANDPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VANDPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VANDPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VANDPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VANDPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VANDPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VANDPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VANDPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VDIVPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VDIVPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VDIVPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VDIVPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VDIVPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VDIVPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VDIVPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VDIVPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPXORDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPXORDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPXORDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPXORDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPXORQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPXORQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPXORQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPXORQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VSUBPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VSUBPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VSUBPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VSUBPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VSUBPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VSUBPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VSUBPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VSUBPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VXORPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VXORPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VXORPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VXORPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VXORPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VXORPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VXORPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VXORPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMADDUBSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMADDUBSWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMADDUBSWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMADDUBSWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPMADDWDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPMADDWDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPMADDWDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPMADDWDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPACKSSDWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPACKSSDWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPACKSSDWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPACKSSDWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPACKSSWBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPACKSSWBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPACKSSWBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPACKSSWBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPACKUSDWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPACKUSDWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPACKUSDWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPACKUSDWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPACKUSWBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPACKUSWBZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPACKUSWBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPACKUSWBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPUNPCKHBWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPUNPCKHBWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPUNPCKHBWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPUNPCKHBWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPUNPCKHDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPUNPCKHDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPUNPCKHDQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPUNPCKHDQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPUNPCKHQDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPUNPCKHQDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPUNPCKHQDQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPUNPCKHQDQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPUNPCKHWDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPUNPCKHWDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPUNPCKHWDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPUNPCKHWDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPUNPCKLBWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPUNPCKLBWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPUNPCKLBWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPUNPCKLBWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPUNPCKLDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPUNPCKLDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPUNPCKLDQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPUNPCKLDQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPUNPCKLQDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPUNPCKLQDQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPUNPCKLQDQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPUNPCKLQDQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPUNPCKLWDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPUNPCKLWDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPUNPCKLWDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPUNPCKLWDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VUNPCKHPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VUNPCKHPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VUNPCKHPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VUNPCKHPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VUNPCKHPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VUNPCKHPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VUNPCKHPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VUNPCKHPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VUNPCKLPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VUNPCKLPDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VUNPCKLPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VUNPCKLPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VUNPCKLPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VUNPCKLPSZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VUNPCKLPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VUNPCKLPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VFMADD132PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMADD132PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMADD132PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMADD132PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMADD132PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMADD132PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMADD132PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMADD132PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMADD213PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMADD213PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMADD213PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMADD213PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMADD213PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMADD213PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMADD213PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMADD213PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMADD231PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMADD231PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMADD231PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMADD231PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMADD231PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMADD231PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMADD231PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMADD231PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMADDSUB132PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMADDSUB132PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMADDSUB132PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMADDSUB132PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMADDSUB132PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMADDSUB132PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMADDSUB132PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMADDSUB132PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMADDSUB213PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMADDSUB213PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMADDSUB213PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMADDSUB213PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMADDSUB213PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMADDSUB213PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMADDSUB213PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMADDSUB213PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMADDSUB231PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMADDSUB231PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMADDSUB231PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMADDSUB231PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMADDSUB231PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMADDSUB231PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMADDSUB231PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMADDSUB231PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMSUB132PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMSUB132PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMSUB132PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMSUB132PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMSUB132PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMSUB132PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMSUB132PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMSUB132PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMSUB213PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMSUB213PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMSUB213PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMSUB213PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMSUB213PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMSUB213PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMSUB213PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMSUB213PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMSUB231PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMSUB231PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMSUB231PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMSUB231PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMSUB231PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMSUB231PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMSUB231PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMSUB231PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMSUBADD132PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMSUBADD132PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMSUBADD132PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMSUBADD132PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMSUBADD132PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMSUBADD132PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMSUBADD132PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMSUBADD132PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMSUBADD213PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMSUBADD213PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMSUBADD213PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMSUBADD213PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMSUBADD213PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMSUBADD213PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMSUBADD213PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMSUBADD213PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMSUBADD231PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMSUBADD231PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMSUBADD231PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMSUBADD231PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFMSUBADD231PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFMSUBADD231PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFMSUBADD231PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFMSUBADD231PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFNMADD132PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFNMADD132PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFNMADD132PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFNMADD132PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFNMADD132PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFNMADD132PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFNMADD132PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFNMADD132PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFNMADD213PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFNMADD213PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFNMADD213PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFNMADD213PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFNMADD213PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFNMADD213PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFNMADD213PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFNMADD213PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFNMADD231PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFNMADD231PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFNMADD231PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFNMADD231PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFNMADD231PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFNMADD231PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFNMADD231PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFNMADD231PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFNMSUB132PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFNMSUB132PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFNMSUB132PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFNMSUB132PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFNMSUB132PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFNMSUB132PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFNMSUB132PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFNMSUB132PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFNMSUB213PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFNMSUB213PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFNMSUB213PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFNMSUB213PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFNMSUB213PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFNMSUB213PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFNMSUB213PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFNMSUB213PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFNMSUB231PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFNMSUB231PDZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFNMSUB231PDZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFNMSUB231PDZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VFNMSUB231PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VFNMSUB231PSZ128m $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VFNMSUB231PSZ128r $xmm16, $xmm1, $xmm2
|
|
$xmm16 = VFNMSUB231PSZ128r $xmm16, $xmm1, $xmm2
|
|
; CHECK: $xmm16 = VPSLLDZ128ri $xmm16, 7
|
|
$xmm16 = VPSLLDZ128ri $xmm16, 7
|
|
; CHECK: $xmm16 = VPSLLDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSLLDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSLLDZ128rr $xmm16, 14
|
|
$xmm16 = VPSLLDZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSLLQZ128ri $xmm16, 7
|
|
$xmm16 = VPSLLQZ128ri $xmm16, 7
|
|
; CHECK: $xmm16 = VPSLLQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSLLQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSLLQZ128rr $xmm16, 14
|
|
$xmm16 = VPSLLQZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSLLVDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSLLVDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSLLVDZ128rr $xmm16, 14
|
|
$xmm16 = VPSLLVDZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSLLVQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSLLVQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSLLVQZ128rr $xmm16, 14
|
|
$xmm16 = VPSLLVQZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSLLWZ128ri $xmm16, 7
|
|
$xmm16 = VPSLLWZ128ri $xmm16, 7
|
|
; CHECK: $xmm16 = VPSLLWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSLLWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSLLWZ128rr $xmm16, 14
|
|
$xmm16 = VPSLLWZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSRADZ128ri $xmm16, 7
|
|
$xmm16 = VPSRADZ128ri $xmm16, 7
|
|
; CHECK: $xmm16 = VPSRADZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSRADZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSRADZ128rr $xmm16, 14
|
|
$xmm16 = VPSRADZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSRAVDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSRAVDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSRAVDZ128rr $xmm16, 14
|
|
$xmm16 = VPSRAVDZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSRAWZ128ri $xmm16, 7
|
|
$xmm16 = VPSRAWZ128ri $xmm16, 7
|
|
; CHECK: $xmm16 = VPSRAWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSRAWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSRAWZ128rr $xmm16, 14
|
|
$xmm16 = VPSRAWZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSRLDQZ128rr $xmm16, 14
|
|
$xmm16 = VPSRLDQZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSRLDZ128ri $xmm16, 7
|
|
$xmm16 = VPSRLDZ128ri $xmm16, 7
|
|
; CHECK: $xmm16 = VPSRLDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSRLDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSRLDZ128rr $xmm16, 14
|
|
$xmm16 = VPSRLDZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSRLQZ128ri $xmm16, 7
|
|
$xmm16 = VPSRLQZ128ri $xmm16, 7
|
|
; CHECK: $xmm16 = VPSRLQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSRLQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSRLQZ128rr $xmm16, 14
|
|
$xmm16 = VPSRLQZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSRLVDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSRLVDZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSRLVDZ128rr $xmm16, 14
|
|
$xmm16 = VPSRLVDZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSRLVQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSRLVQZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSRLVQZ128rr $xmm16, 14
|
|
$xmm16 = VPSRLVQZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPSRLWZ128ri $xmm16, 7
|
|
$xmm16 = VPSRLWZ128ri $xmm16, 7
|
|
; CHECK: $xmm16 = VPSRLWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPSRLWZ128rm $xmm16, $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPSRLWZ128rr $xmm16, 14
|
|
$xmm16 = VPSRLWZ128rr $xmm16, 14
|
|
; CHECK: $xmm16 = VPERMILPDZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$xmm16 = VPERMILPDZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPERMILPDZ128ri $xmm16, 9
|
|
$xmm16 = VPERMILPDZ128ri $xmm16, 9
|
|
; CHECK: $xmm16 = VPERMILPDZ128rm $xmm16, $rdi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VPERMILPDZ128rm $xmm16, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VPERMILPDZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPERMILPDZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPERMILPSZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$xmm16 = VPERMILPSZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPERMILPSZ128ri $xmm16, 9
|
|
$xmm16 = VPERMILPSZ128ri $xmm16, 9
|
|
; CHECK: $xmm16 = VPERMILPSZ128rm $xmm16, $rdi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VPERMILPSZ128rm $xmm16, $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VPERMILPSZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPERMILPSZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VCVTPH2PSZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
$xmm16 = VCVTPH2PSZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
; CHECK: $xmm16 = VCVTPH2PSZ128rr $xmm16
|
|
$xmm16 = VCVTPH2PSZ128rr $xmm16
|
|
; CHECK: $xmm16 = VCVTDQ2PDZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
$xmm16 = VCVTDQ2PDZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
; CHECK: $xmm16 = VCVTDQ2PDZ128rr $xmm16
|
|
$xmm16 = VCVTDQ2PDZ128rr $xmm16
|
|
; CHECK: $xmm16 = VCVTDQ2PSZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
$xmm16 = VCVTDQ2PSZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
; CHECK: $xmm16 = VCVTDQ2PSZ128rr $xmm16
|
|
$xmm16 = VCVTDQ2PSZ128rr $xmm16
|
|
; CHECK: $xmm16 = VCVTPD2DQZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
$xmm16 = VCVTPD2DQZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
; CHECK: $xmm16 = VCVTPD2DQZ128rr $xmm16
|
|
$xmm16 = VCVTPD2DQZ128rr $xmm16
|
|
; CHECK: $xmm16 = VCVTPD2PSZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
$xmm16 = VCVTPD2PSZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
; CHECK: $xmm16 = VCVTPD2PSZ128rr $xmm16
|
|
$xmm16 = VCVTPD2PSZ128rr $xmm16
|
|
; CHECK: $xmm16 = VCVTPS2DQZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
$xmm16 = VCVTPS2DQZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
; CHECK: $xmm16 = VCVTPS2DQZ128rr $xmm16
|
|
$xmm16 = VCVTPS2DQZ128rr $xmm16
|
|
; CHECK: $xmm16 = VCVTPS2PDZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
$xmm16 = VCVTPS2PDZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
; CHECK: $xmm16 = VCVTPS2PDZ128rr $xmm16
|
|
$xmm16 = VCVTPS2PDZ128rr $xmm16
|
|
; CHECK: $xmm16 = VCVTTPD2DQZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
$xmm16 = VCVTTPD2DQZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
; CHECK: $xmm16 = VCVTTPD2DQZ128rr $xmm16
|
|
$xmm16 = VCVTTPD2DQZ128rr $xmm16
|
|
; CHECK: $xmm16 = VCVTTPS2DQZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
$xmm16 = VCVTTPS2DQZ128rm $rdi, $xmm16, 1, $noreg, 0
|
|
; CHECK: $xmm16 = VCVTTPS2DQZ128rr $xmm16
|
|
$xmm16 = VCVTTPS2DQZ128rr $xmm16
|
|
; CHECK: $xmm16 = VSQRTPDZ128m $rdi, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VSQRTPDZ128m $rdi, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VSQRTPDZ128r $xmm16
|
|
$xmm16 = VSQRTPDZ128r $xmm16
|
|
; CHECK: $xmm16 = VSQRTPSZ128m $rdi, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VSQRTPSZ128m $rdi, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VSQRTPSZ128r $xmm16
|
|
$xmm16 = VSQRTPSZ128r $xmm16
|
|
; CHECK: $xmm16 = VMOVDDUPZ128rm $rdi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VMOVDDUPZ128rm $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VMOVDDUPZ128rr $xmm16
|
|
$xmm16 = VMOVDDUPZ128rr $xmm16
|
|
; CHECK: $xmm16 = VMOVSHDUPZ128rm $rdi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VMOVSHDUPZ128rm $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VMOVSHDUPZ128rr $xmm16
|
|
$xmm16 = VMOVSHDUPZ128rr $xmm16
|
|
; CHECK: $xmm16 = VMOVSLDUPZ128rm $rdi, 1, $noreg, 0, $noreg
|
|
$xmm16 = VMOVSLDUPZ128rm $rdi, 1, $noreg, 0, $noreg
|
|
; CHECK: $xmm16 = VMOVSLDUPZ128rr $xmm16
|
|
$xmm16 = VMOVSLDUPZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPSHUFBZ128rm $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VPSHUFBZ128rm $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPSHUFBZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPSHUFBZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VPSHUFDZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$xmm16 = VPSHUFDZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPSHUFDZ128ri $xmm16, -24
|
|
$xmm16 = VPSHUFDZ128ri $xmm16, -24
|
|
; CHECK: $xmm16 = VPSHUFHWZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$xmm16 = VPSHUFHWZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPSHUFHWZ128ri $xmm16, -24
|
|
$xmm16 = VPSHUFHWZ128ri $xmm16, -24
|
|
; CHECK: $xmm16 = VPSHUFLWZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
$xmm16 = VPSHUFLWZ128mi $rdi, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPSHUFLWZ128ri $xmm16, -24
|
|
$xmm16 = VPSHUFLWZ128ri $xmm16, -24
|
|
; CHECK: $xmm16 = VPSLLDQZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPSLLDQZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VSHUFPDZ128rmi $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VSHUFPDZ128rmi $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VSHUFPDZ128rri $xmm16, $noreg, $noreg
|
|
$xmm16 = VSHUFPDZ128rri $xmm16, $noreg, $noreg
|
|
; CHECK: $xmm16 = VSHUFPSZ128rmi $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VSHUFPSZ128rmi $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VSHUFPSZ128rri $xmm16, $noreg, $noreg
|
|
$xmm16 = VSHUFPSZ128rri $xmm16, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPSADBWZ128rm $xmm16, 1, $noreg, $rax, $noreg, $noreg
|
|
$xmm16 = VPSADBWZ128rm $xmm16, 1, $noreg, $rax, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPSADBWZ128rr $xmm16, $xmm1
|
|
$xmm16 = VPSADBWZ128rr $xmm16, $xmm1
|
|
; CHECK: $xmm16 = VBROADCASTSSZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VBROADCASTSSZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VBROADCASTSSZ128r $xmm16
|
|
$xmm16 = VBROADCASTSSZ128r $xmm16
|
|
; CHECK: $xmm16 = VPBROADCASTBZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VPBROADCASTBZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPBROADCASTBZ128r $xmm16
|
|
$xmm16 = VPBROADCASTBZ128r $xmm16
|
|
; CHECK: $xmm16 = VPBROADCASTDZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VPBROADCASTDZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPBROADCASTDZ128r $xmm16
|
|
$xmm16 = VPBROADCASTDZ128r $xmm16
|
|
; CHECK: $xmm16 = VPBROADCASTQZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VPBROADCASTQZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPBROADCASTQZ128r $xmm16
|
|
$xmm16 = VPBROADCASTQZ128r $xmm16
|
|
; CHECK: $xmm16 = VPBROADCASTWZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VPBROADCASTWZ128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPBROADCASTWZ128r $xmm16
|
|
$xmm16 = VPBROADCASTWZ128r $xmm16
|
|
; CHECK: $xmm16 = VBROADCASTI32X2Z128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VBROADCASTI32X2Z128m $rip, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VBROADCASTI32X2Z128r $xmm0
|
|
$xmm16 = VBROADCASTI32X2Z128r $xmm0
|
|
; CHECK: $xmm16 = VCVTPS2PHZ128rr $xmm16, 2
|
|
$xmm16 = VCVTPS2PHZ128rr $xmm16, 2
|
|
; CHECK: VCVTPS2PHZ128mr $rdi, $xmm16, 1, $noreg, 0, $noreg, $noreg
|
|
VCVTPS2PHZ128mr $rdi, $xmm16, 1, $noreg, 0, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPABSBZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPABSBZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPABSBZ128rr $xmm16
|
|
$xmm16 = VPABSBZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPABSDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPABSDZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPABSDZ128rr $xmm16
|
|
$xmm16 = VPABSDZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPABSWZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
$xmm16 = VPABSWZ128rm $rip, 1, $noreg, $rax, $noreg
|
|
; CHECK: $xmm16 = VPABSWZ128rr $xmm16
|
|
$xmm16 = VPABSWZ128rr $xmm16
|
|
; CHECK: $xmm16 = VPALIGNRZ128rmi $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VPALIGNRZ128rmi $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VPALIGNRZ128rri $xmm16, $xmm1, 15
|
|
$xmm16 = VPALIGNRZ128rri $xmm16, $xmm1, 15
|
|
; CHECK: VEXTRACTPSZmr $rdi, 1, $noreg, 0, $noreg, $xmm16, $noreg
|
|
VEXTRACTPSZmr $rdi, 1, $noreg, 0, $noreg, $xmm16, $noreg
|
|
; CHECK: $eax = VEXTRACTPSZrr $xmm16, $noreg
|
|
$eax = VEXTRACTPSZrr $xmm16, $noreg
|
|
; CHECK: $xmm16 = VINSERTPSZrm $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
$xmm16 = VINSERTPSZrm $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, $noreg
|
|
; CHECK: $xmm16 = VINSERTPSZrr $xmm16, $xmm16, $noreg
|
|
$xmm16 = VINSERTPSZrr $xmm16, $xmm16, $noreg

RET 0, $zmm0, $zmm1
...
---
# CHECK-LABEL: name: evex_scalar_to_evex_test
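# Scalar instructions operating on xmm16-xmm31 likewise have no VEX form and
# must remain EVEX-encoded.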
# CHECK: bb.0:

name: evex_scalar_to_evex_test
body: |
bb.0:
    ; CHECK: $xmm16 = VADDSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VADDSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VADDSDZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VADDSDZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VADDSDZrr $xmm16, $xmm1
    $xmm16 = VADDSDZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VADDSDZrr_Int $xmm16, $xmm1
    $xmm16 = VADDSDZrr_Int $xmm16, $xmm1
    ; CHECK: $xmm16 = VADDSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VADDSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VADDSSZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VADDSSZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VADDSSZrr $xmm16, $xmm1
    $xmm16 = VADDSSZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VADDSSZrr_Int $xmm16, $xmm1
    $xmm16 = VADDSSZrr_Int $xmm16, $xmm1
    ; CHECK: $xmm16 = VDIVSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VDIVSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VDIVSDZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VDIVSDZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VDIVSDZrr $xmm16, $xmm1
    $xmm16 = VDIVSDZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VDIVSDZrr_Int $xmm16, $xmm1
    $xmm16 = VDIVSDZrr_Int $xmm16, $xmm1
    ; CHECK: $xmm16 = VDIVSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VDIVSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VDIVSSZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VDIVSSZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VDIVSSZrr $xmm16, $xmm1
    $xmm16 = VDIVSSZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VDIVSSZrr_Int $xmm16, $xmm1
    $xmm16 = VDIVSSZrr_Int $xmm16, $xmm1
    ; CHECK: $xmm16 = VMAXCSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMAXCSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMAXCSDZrr $xmm16, $xmm1
    $xmm16 = VMAXCSDZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VMAXCSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMAXCSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMAXCSSZrr $xmm16, $xmm1
    $xmm16 = VMAXCSSZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VMAXSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMAXSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMAXSDZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMAXSDZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMAXSDZrr $xmm16, $xmm1
    $xmm16 = VMAXSDZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VMAXSDZrr_Int $xmm16, $xmm1
    $xmm16 = VMAXSDZrr_Int $xmm16, $xmm1
    ; CHECK: $xmm16 = VMAXSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMAXSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMAXSSZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMAXSSZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMAXSSZrr $xmm16, $xmm1
    $xmm16 = VMAXSSZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VMAXSSZrr_Int $xmm16, $xmm1
    $xmm16 = VMAXSSZrr_Int $xmm16, $xmm1
    ; CHECK: $xmm16 = VMINCSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMINCSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMINCSDZrr $xmm16, $xmm1
    $xmm16 = VMINCSDZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VMINCSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMINCSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMINCSSZrr $xmm16, $xmm1
    $xmm16 = VMINCSSZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VMINSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMINSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMINSDZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMINSDZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMINSDZrr $xmm16, $xmm1
    $xmm16 = VMINSDZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VMINSDZrr_Int $xmm16, $xmm1
    $xmm16 = VMINSDZrr_Int $xmm16, $xmm1
    ; CHECK: $xmm16 = VMINSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMINSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMINSSZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMINSSZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMINSSZrr $xmm16, $xmm1
    $xmm16 = VMINSSZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VMINSSZrr_Int $xmm16, $xmm1
    $xmm16 = VMINSSZrr_Int $xmm16, $xmm1
    ; CHECK: $xmm16 = VMULSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMULSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMULSDZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMULSDZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMULSDZrr $xmm16, $xmm1
    $xmm16 = VMULSDZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VMULSDZrr_Int $xmm16, $xmm1
    $xmm16 = VMULSDZrr_Int $xmm16, $xmm1
    ; CHECK: $xmm16 = VMULSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMULSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMULSSZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VMULSSZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VMULSSZrr $xmm16, $xmm1
    $xmm16 = VMULSSZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VMULSSZrr_Int $xmm16, $xmm1
    $xmm16 = VMULSSZrr_Int $xmm16, $xmm1
    ; CHECK: $xmm16 = VSUBSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VSUBSDZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VSUBSDZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VSUBSDZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VSUBSDZrr $xmm16, $xmm1
    $xmm16 = VSUBSDZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VSUBSDZrr_Int $xmm16, $xmm1
    $xmm16 = VSUBSDZrr_Int $xmm16, $xmm1
    ; CHECK: $xmm16 = VSUBSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VSUBSSZrm $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VSUBSSZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    $xmm16 = VSUBSSZrm_Int $xmm16, $rip, 1, $noreg, $rax, $noreg
    ; CHECK: $xmm16 = VSUBSSZrr $xmm16, $xmm1
    $xmm16 = VSUBSSZrr $xmm16, $xmm1
    ; CHECK: $xmm16 = VSUBSSZrr_Int $xmm16, $xmm1
    $xmm16 = VSUBSSZrr_Int $xmm16, $xmm1
    ; CHECK: $xmm16 = VFMADD132SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMADD132SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMADD132SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMADD132SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMADD132SDZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFMADD132SDZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMADD132SDZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFMADD132SDZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMADD132SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMADD132SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMADD132SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMADD132SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMADD132SSZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFMADD132SSZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMADD132SSZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFMADD132SSZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMADD213SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMADD213SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMADD213SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMADD213SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMADD213SDZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFMADD213SDZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMADD213SDZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFMADD213SDZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMADD213SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMADD213SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMADD213SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMADD213SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMADD213SSZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFMADD213SSZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMADD213SSZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFMADD213SSZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMADD231SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMADD231SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMADD231SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMADD231SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMADD231SDZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFMADD231SDZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMADD231SDZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFMADD231SDZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMADD231SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMADD231SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMADD231SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMADD231SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMADD231SSZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFMADD231SSZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMADD231SSZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFMADD231SSZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMSUB132SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMSUB132SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMSUB132SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMSUB132SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMSUB132SDZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFMSUB132SDZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMSUB132SDZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFMSUB132SDZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMSUB132SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMSUB132SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMSUB132SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMSUB132SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMSUB132SSZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFMSUB132SSZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMSUB132SSZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFMSUB132SSZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMSUB213SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMSUB213SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMSUB213SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMSUB213SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMSUB213SDZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFMSUB213SDZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMSUB213SDZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFMSUB213SDZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMSUB213SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMSUB213SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMSUB213SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMSUB213SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMSUB213SSZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFMSUB213SSZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMSUB213SSZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFMSUB213SSZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMSUB231SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMSUB231SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMSUB231SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMSUB231SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMSUB231SDZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFMSUB231SDZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMSUB231SDZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFMSUB231SDZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMSUB231SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMSUB231SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMSUB231SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFMSUB231SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFMSUB231SSZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFMSUB231SSZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFMSUB231SSZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFMSUB231SSZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMADD132SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMADD132SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMADD132SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMADD132SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMADD132SDZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMADD132SDZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMADD132SDZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMADD132SDZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMADD132SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMADD132SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMADD132SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMADD132SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMADD132SSZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMADD132SSZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMADD132SSZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMADD132SSZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMADD213SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMADD213SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMADD213SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMADD213SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMADD213SDZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMADD213SDZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMADD213SDZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMADD213SDZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMADD213SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMADD213SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMADD213SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMADD213SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMADD213SSZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMADD213SSZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMADD213SSZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMADD213SSZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMADD231SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMADD231SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMADD231SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMADD231SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMADD231SDZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMADD231SDZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMADD231SDZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMADD231SDZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMADD231SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMADD231SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMADD231SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMADD231SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMADD231SSZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMADD231SSZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMADD231SSZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMADD231SSZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMSUB132SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMSUB132SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMSUB132SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMSUB132SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMSUB132SDZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMSUB132SDZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMSUB132SDZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMSUB132SDZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMSUB132SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMSUB132SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMSUB132SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMSUB132SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMSUB132SSZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMSUB132SSZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMSUB132SSZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMSUB132SSZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMSUB213SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMSUB213SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMSUB213SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMSUB213SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMSUB213SDZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMSUB213SDZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMSUB213SDZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMSUB213SDZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMSUB213SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMSUB213SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMSUB213SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMSUB213SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMSUB213SSZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMSUB213SSZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMSUB213SSZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMSUB213SSZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMSUB231SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMSUB231SDZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMSUB231SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMSUB231SDZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMSUB231SDZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMSUB231SDZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMSUB231SDZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMSUB231SDZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMSUB231SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMSUB231SSZm $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMSUB231SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    $xmm16 = VFNMSUB231SSZm_Int $xmm16, $xmm16, $rsi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VFNMSUB231SSZr $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMSUB231SSZr $xmm16, $xmm1, $xmm2
    ; CHECK: $xmm16 = VFNMSUB231SSZr_Int $xmm16, $xmm1, $xmm2
    $xmm16 = VFNMSUB231SSZr_Int $xmm16, $xmm1, $xmm2
    ; CHECK: VPEXTRBZmr $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
    VPEXTRBZmr $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
    ; CHECK: $eax = VPEXTRBZrr $xmm16, 1
    $eax = VPEXTRBZrr $xmm16, 1
    ; CHECK: VPEXTRDZmr $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
    VPEXTRDZmr $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
    ; CHECK: $eax = VPEXTRDZrr $xmm16, 1
    $eax = VPEXTRDZrr $xmm16, 1
    ; CHECK: VPEXTRQZmr $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
    VPEXTRQZmr $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
    ; CHECK: $rax = VPEXTRQZrr $xmm16, 1
    $rax = VPEXTRQZrr $xmm16, 1
    ; CHECK: VPEXTRWZmr $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
    VPEXTRWZmr $rdi, 1, $noreg, 0, $noreg, $xmm16, 3
    ; CHECK: $eax = VPEXTRWZrr $xmm16, 1
    $eax = VPEXTRWZrr $xmm16, 1
    ; CHECK: $eax = VPEXTRWZrr_REV $xmm16, 1
    $eax = VPEXTRWZrr_REV $xmm16, 1
    ; CHECK: $xmm16 = VPINSRBZrm $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
    $xmm16 = VPINSRBZrm $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
    ; CHECK: $xmm16 = VPINSRBZrr $xmm16, $edi, 5
    $xmm16 = VPINSRBZrr $xmm16, $edi, 5
    ; CHECK: $xmm16 = VPINSRDZrm $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
    $xmm16 = VPINSRDZrm $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
    ; CHECK: $xmm16 = VPINSRDZrr $xmm16, $edi, 5
    $xmm16 = VPINSRDZrr $xmm16, $edi, 5
    ; CHECK: $xmm16 = VPINSRQZrm $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
    $xmm16 = VPINSRQZrm $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
    ; CHECK: $xmm16 = VPINSRQZrr $xmm16, $rdi, 5
    $xmm16 = VPINSRQZrr $xmm16, $rdi, 5
    ; CHECK: $xmm16 = VPINSRWZrm $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
    $xmm16 = VPINSRWZrm $xmm16, $rsi, 1, $noreg, 0, $noreg, 3
    ; CHECK: $xmm16 = VPINSRWZrr $xmm16, $edi, 5
    $xmm16 = VPINSRWZrr $xmm16, $edi, 5
    ; CHECK: $xmm16 = VSQRTSDZm $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
    $xmm16 = VSQRTSDZm $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VSQRTSDZm_Int $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
    $xmm16 = VSQRTSDZm_Int $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VSQRTSDZr $xmm16, $noreg
    $xmm16 = VSQRTSDZr $xmm16, $noreg
    ; CHECK: $xmm16 = VSQRTSDZr_Int $xmm16, $noreg
    $xmm16 = VSQRTSDZr_Int $xmm16, $noreg
    ; CHECK: $xmm16 = VSQRTSSZm $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
    $xmm16 = VSQRTSSZm $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VSQRTSSZm_Int $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
    $xmm16 = VSQRTSSZm_Int $xmm16, $noreg, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VSQRTSSZr $xmm16, $noreg
    $xmm16 = VSQRTSSZr $xmm16, $noreg
    ; CHECK: $xmm16 = VSQRTSSZr_Int $xmm16, $noreg
    $xmm16 = VSQRTSSZr_Int $xmm16, $noreg
    ; CHECK: $rdi = VCVTSD2SI64Zrm_Int $rdi, $xmm16, 1, $noreg, 0
    $rdi = VCVTSD2SI64Zrm_Int $rdi, $xmm16, 1, $noreg, 0
    ; CHECK: $rdi = VCVTSD2SI64Zrr_Int $xmm16
    $rdi = VCVTSD2SI64Zrr_Int $xmm16
    ; CHECK: $edi = VCVTSD2SIZrm_Int $rdi, $xmm16, 1, $noreg, 0
    $edi = VCVTSD2SIZrm_Int $rdi, $xmm16, 1, $noreg, 0
    ; CHECK: $edi = VCVTSD2SIZrr_Int $xmm16
    $edi = VCVTSD2SIZrr_Int $xmm16
    ; CHECK: $xmm16 = VCVTSD2SSZrm $xmm16, $rdi, 1, $noreg, 0, $noreg
    $xmm16 = VCVTSD2SSZrm $xmm16, $rdi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VCVTSD2SSZrm_Int $xmm16, $rdi, 1, $noreg, 0, $noreg
    $xmm16 = VCVTSD2SSZrm_Int $xmm16, $rdi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VCVTSD2SSZrr $xmm16, $noreg
    $xmm16 = VCVTSD2SSZrr $xmm16, $noreg
    ; CHECK: $xmm16 = VCVTSD2SSZrr_Int $xmm16, $noreg
    $xmm16 = VCVTSD2SSZrr_Int $xmm16, $noreg
    ; CHECK: $xmm16 = VCVTSI2SDZrm $xmm16, $rdi, 1, $noreg, 0, $noreg
    $xmm16 = VCVTSI2SDZrm $xmm16, $rdi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VCVTSI2SDZrm_Int $xmm16, $rdi, 1, $noreg, 0, $noreg
    $xmm16 = VCVTSI2SDZrm_Int $xmm16, $rdi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VCVTSI2SDZrr $xmm16, $noreg
    $xmm16 = VCVTSI2SDZrr $xmm16, $noreg
    ; CHECK: $xmm16 = VCVTSI2SDZrr_Int $xmm16, $noreg
    $xmm16 = VCVTSI2SDZrr_Int $xmm16, $noreg
    ; CHECK: $xmm16 = VCVTSI2SSZrm $xmm16, $rdi, 1, $noreg, 0, $noreg
    $xmm16 = VCVTSI2SSZrm $xmm16, $rdi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VCVTSI2SSZrm_Int $xmm16, $rdi, 1, $noreg, 0, $noreg
    $xmm16 = VCVTSI2SSZrm_Int $xmm16, $rdi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VCVTSI2SSZrr $xmm16, $noreg
    $xmm16 = VCVTSI2SSZrr $xmm16, $noreg
    ; CHECK: $xmm16 = VCVTSI2SSZrr_Int $xmm16, $noreg
    $xmm16 = VCVTSI2SSZrr_Int $xmm16, $noreg
    ; CHECK: $xmm16 = VCVTSI642SDZrm $xmm16, $rdi, 1, $noreg, 0, $noreg
    $xmm16 = VCVTSI642SDZrm $xmm16, $rdi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VCVTSI642SDZrm_Int $xmm16, $rdi, 1, $noreg, 0, $noreg
    $xmm16 = VCVTSI642SDZrm_Int $xmm16, $rdi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VCVTSI642SDZrr $xmm16, $noreg
    $xmm16 = VCVTSI642SDZrr $xmm16, $noreg
    ; CHECK: $xmm16 = VCVTSI642SDZrr_Int $xmm16, $noreg
    $xmm16 = VCVTSI642SDZrr_Int $xmm16, $noreg
    ; CHECK: $xmm16 = VCVTSI642SSZrm $xmm16, $rdi, 1, $noreg, 0, $noreg
    $xmm16 = VCVTSI642SSZrm $xmm16, $rdi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VCVTSI642SSZrm_Int $xmm16, $rdi, 1, $noreg, 0, $noreg
    $xmm16 = VCVTSI642SSZrm_Int $xmm16, $rdi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VCVTSI642SSZrr $xmm16, $noreg
    $xmm16 = VCVTSI642SSZrr $xmm16, $noreg
    ; CHECK: $xmm16 = VCVTSI642SSZrr_Int $xmm16, $noreg
    $xmm16 = VCVTSI642SSZrr_Int $xmm16, $noreg
    ; CHECK: $xmm16 = VCVTSS2SDZrm $xmm16, $rdi, 1, $noreg, 0, $noreg
    $xmm16 = VCVTSS2SDZrm $xmm16, $rdi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VCVTSS2SDZrm_Int $xmm16, $rdi, 1, $noreg, 0, $noreg
    $xmm16 = VCVTSS2SDZrm_Int $xmm16, $rdi, 1, $noreg, 0, $noreg
    ; CHECK: $xmm16 = VCVTSS2SDZrr $xmm16, $noreg
    $xmm16 = VCVTSS2SDZrr $xmm16, $noreg
    ; CHECK: $xmm16 = VCVTSS2SDZrr_Int $xmm16, $noreg
    $xmm16 = VCVTSS2SDZrr_Int $xmm16, $noreg
    ; CHECK: $rdi = VCVTSS2SI64Zrm_Int $rdi, $xmm16, 1, $noreg, 0
    $rdi = VCVTSS2SI64Zrm_Int $rdi, $xmm16, 1, $noreg, 0
    ; CHECK: $rdi = VCVTSS2SI64Zrr_Int $xmm16
    $rdi = VCVTSS2SI64Zrr_Int $xmm16
    ; CHECK: $edi = VCVTSS2SIZrm_Int $rdi, $xmm16, 1, $noreg, 0
    $edi = VCVTSS2SIZrm_Int $rdi, $xmm16, 1, $noreg, 0
    ; CHECK: $edi = VCVTSS2SIZrr_Int $xmm16
    $edi = VCVTSS2SIZrr_Int $xmm16
    ; CHECK: $rdi = VCVTTSD2SI64Zrm $rdi, $xmm16, 1, $noreg, 0
    $rdi = VCVTTSD2SI64Zrm $rdi, $xmm16, 1, $noreg, 0
    ; CHECK: $rdi = VCVTTSD2SI64Zrm_Int $rdi, $xmm16, 1, $noreg, 0
    $rdi = VCVTTSD2SI64Zrm_Int $rdi, $xmm16, 1, $noreg, 0
    ; CHECK: $rdi = VCVTTSD2SI64Zrr $xmm16
    $rdi = VCVTTSD2SI64Zrr $xmm16
    ; CHECK: $rdi = VCVTTSD2SI64Zrr_Int $xmm16
    $rdi = VCVTTSD2SI64Zrr_Int $xmm16
    ; CHECK: $edi = VCVTTSD2SIZrm $rdi, $xmm16, 1, $noreg, 0
    $edi = VCVTTSD2SIZrm $rdi, $xmm16, 1, $noreg, 0
    ; CHECK: $edi = VCVTTSD2SIZrm_Int $rdi, $xmm16, 1, $noreg, 0
    $edi = VCVTTSD2SIZrm_Int $rdi, $xmm16, 1, $noreg, 0
    ; CHECK: $edi = VCVTTSD2SIZrr $xmm16
    $edi = VCVTTSD2SIZrr $xmm16
    ; CHECK: $edi = VCVTTSD2SIZrr_Int $xmm16
    $edi = VCVTTSD2SIZrr_Int $xmm16
    ; CHECK: $rdi = VCVTTSS2SI64Zrm $rdi, $xmm16, 1, $noreg, 0
    $rdi = VCVTTSS2SI64Zrm $rdi, $xmm16, 1, $noreg, 0
    ; CHECK: $rdi = VCVTTSS2SI64Zrm_Int $rdi, $xmm16, 1, $noreg, 0
    $rdi = VCVTTSS2SI64Zrm_Int $rdi, $xmm16, 1, $noreg, 0
    ; CHECK: $rdi = VCVTTSS2SI64Zrr $xmm16
    $rdi = VCVTTSS2SI64Zrr $xmm16
    ; CHECK: $rdi = VCVTTSS2SI64Zrr_Int $xmm16
    $rdi = VCVTTSS2SI64Zrr_Int $xmm16
    ; CHECK: $edi = VCVTTSS2SIZrm $rdi, $xmm16, 1, $noreg, 0
    $edi = VCVTTSS2SIZrm $rdi, $xmm16, 1, $noreg, 0
    ; CHECK: $edi = VCVTTSS2SIZrm_Int $rdi, $xmm16, 1, $noreg, 0
    $edi = VCVTTSS2SIZrm_Int $rdi, $xmm16, 1, $noreg, 0
    ; CHECK: $edi = VCVTTSS2SIZrr $xmm16
    $edi = VCVTTSS2SIZrr $xmm16
    ; CHECK: $edi = VCVTTSS2SIZrr_Int $xmm16
    $edi = VCVTTSS2SIZrr_Int $xmm16
    ; CHECK: $xmm16 = VMOV64toSDZrr $rdi
    $xmm16 = VMOV64toSDZrr $rdi
    ; CHECK: $xmm16 = VMOVDI2SSZrm $rip, $noreg, $noreg, $noreg, $noreg
    $xmm16 = VMOVDI2SSZrm $rip, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VMOVDI2SSZrr $eax
    $xmm16 = VMOVDI2SSZrr $eax
    ; CHECK: VMOVSDZmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    VMOVSDZmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VMOVSDZrm $rip, $noreg, $noreg, $noreg, $noreg
    $xmm16 = VMOVSDZrm $rip, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VMOVSDZrr $xmm16, $noreg
    $xmm16 = VMOVSDZrr $xmm16, $noreg
    ; CHECK: $xmm16 = VMOVSDZrr_REV $xmm16, $noreg
    $xmm16 = VMOVSDZrr_REV $xmm16, $noreg
    ; CHECK: $rax = VMOVSDto64Zrr $xmm16
    $rax = VMOVSDto64Zrr $xmm16
    ; CHECK: VMOVSDto64Zmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    VMOVSDto64Zmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    ; CHECK: VMOVSSZmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    VMOVSSZmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VMOVSSZrm $rip, $noreg, $noreg, $noreg, $noreg
    $xmm16 = VMOVSSZrm $rip, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VMOVSSZrr $xmm16, $noreg
    $xmm16 = VMOVSSZrr $xmm16, $noreg
    ; CHECK: $xmm16 = VMOVSSZrr_REV $xmm16, $noreg
    $xmm16 = VMOVSSZrr_REV $xmm16, $noreg
    ; CHECK: VMOVSS2DIZmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    VMOVSS2DIZmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $eax = VMOVSS2DIZrr $xmm16
    $eax = VMOVSS2DIZrr $xmm16
    ; CHECK: $xmm16 = VMOV64toPQIZrr $rdi
    $xmm16 = VMOV64toPQIZrr $rdi
    ; CHECK: $xmm16 = VMOV64toPQIZrm $rdi, $noreg, $noreg, $noreg, $noreg
    $xmm16 = VMOV64toPQIZrm $rdi, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VMOV64toSDZrr $rdi
    $xmm16 = VMOV64toSDZrr $rdi
    ; CHECK: $xmm16 = VMOVDI2PDIZrm $rip, $noreg, $noreg, $noreg, $noreg
    $xmm16 = VMOVDI2PDIZrm $rip, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VMOVDI2PDIZrr $edi
    $xmm16 = VMOVDI2PDIZrr $edi
    ; CHECK: $xmm16 = VMOVLHPSZrr $xmm16, $noreg
    $xmm16 = VMOVLHPSZrr $xmm16, $noreg
    ; CHECK: $xmm16 = VMOVHLPSZrr $xmm16, $noreg
    $xmm16 = VMOVHLPSZrr $xmm16, $noreg
    ; CHECK: VMOVPDI2DIZmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    VMOVPDI2DIZmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $edi = VMOVPDI2DIZrr $xmm16
    $edi = VMOVPDI2DIZrr $xmm16
    ; CHECK: $xmm16 = VMOVPQI2QIZrr $xmm16
    $xmm16 = VMOVPQI2QIZrr $xmm16
    ; CHECK: VMOVPQI2QIZmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    VMOVPQI2QIZmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $rdi = VMOVPQIto64Zrr $xmm16
    $rdi = VMOVPQIto64Zrr $xmm16
    ; CHECK: VMOVPQIto64Zmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    VMOVPQIto64Zmr $rdi, $xmm16, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VMOVQI2PQIZrm $rip, $noreg, $noreg, $noreg, $noreg
    $xmm16 = VMOVQI2PQIZrm $rip, $noreg, $noreg, $noreg, $noreg
    ; CHECK: $xmm16 = VMOVZPQILo2PQIZrr $xmm16
    $xmm16 = VMOVZPQILo2PQIZrr $xmm16
    ; CHECK: VCOMISDZrm_Int $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    VCOMISDZrm_Int $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    ; CHECK: VCOMISDZrr_Int $xmm16, $xmm1, implicit-def $eflags
    VCOMISDZrr_Int $xmm16, $xmm1, implicit-def $eflags
    ; CHECK: VCOMISSZrm_Int $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    VCOMISSZrm_Int $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    ; CHECK: VCOMISSZrr_Int $xmm16, $xmm1, implicit-def $eflags
    VCOMISSZrr_Int $xmm16, $xmm1, implicit-def $eflags
    ; CHECK: VUCOMISDZrm_Int $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    VUCOMISDZrm_Int $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    ; CHECK: VUCOMISDZrr_Int $xmm16, $xmm1, implicit-def $eflags
    VUCOMISDZrr_Int $xmm16, $xmm1, implicit-def $eflags
    ; CHECK: VUCOMISSZrm_Int $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    VUCOMISSZrm_Int $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    ; CHECK: VUCOMISSZrr_Int $xmm16, $xmm1, implicit-def $eflags
    VUCOMISSZrr_Int $xmm16, $xmm1, implicit-def $eflags
    ; CHECK: VCOMISDZrm $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    VCOMISDZrm $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    ; CHECK: VCOMISDZrr $xmm16, $xmm1, implicit-def $eflags
    VCOMISDZrr $xmm16, $xmm1, implicit-def $eflags
    ; CHECK: VCOMISSZrm $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    VCOMISSZrm $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    ; CHECK: VCOMISSZrr $xmm16, $xmm1, implicit-def $eflags
    VCOMISSZrr $xmm16, $xmm1, implicit-def $eflags
    ; CHECK: VUCOMISDZrm $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    VUCOMISDZrm $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    ; CHECK: VUCOMISDZrr $xmm16, $xmm1, implicit-def $eflags
    VUCOMISDZrr $xmm16, $xmm1, implicit-def $eflags
    ; CHECK: VUCOMISSZrm $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    VUCOMISSZrm $xmm16, $rdi, $noreg, $noreg, $noreg, $noreg, implicit-def $eflags
    ; CHECK: VUCOMISSZrr $xmm16, $xmm1, implicit-def $eflags
    VUCOMISSZrr $xmm16, $xmm1, implicit-def $eflags

    RET 0, $zmm0, $zmm1
...