; llvm-project/llvm/test/CodeGen/X86/memset-zero.ll

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-linux | FileCheck %s --check-prefix=X86
; RUN: llc < %s -mtriple=x86_64-unknown-linux -mcpu=core2 | FileCheck %s --check-prefix=CORE2
; RUN: llc < %s -mtriple=x86_64-unknown-linux -mcpu=nehalem | FileCheck %s --check-prefix=NEHALEM
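
; Verify that small, constant-size zero memsets are lowered to plain stores.
; core2 avoids unaligned 16-byte vector stores, while nehalem has fast
; unaligned SSE accesses and can use movups.
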
declare void @llvm.memset.p0i8.i64(i8* nocapture, i8, i64, i1) nounwind
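
; A zero-length memset is removed entirely.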
define void @memset_0(i8* %a) nounwind {
; X86-LABEL: memset_0:
; X86: # %bb.0: # %entry
; X86-NEXT: retl
;
; CORE2-LABEL: memset_0:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_0:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 0, i1 false)
ret void
}
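
; 4 bytes: a single 32-bit store.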
define void @memset_4(i8* %a) nounwind {
; X86-LABEL: memset_4:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl $0, (%eax)
; X86-NEXT: retl
;
; CORE2-LABEL: memset_4:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: movl $0, (%rdi)
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_4:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: movl $0, (%rdi)
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 4, i1 false)
ret void
}
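
; 5 bytes: a 32-bit store plus a trailing byte store.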
define void @memset_5(i8* %a) nounwind {
; X86-LABEL: memset_5:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movb $0, 4(%eax)
; X86-NEXT: movl $0, (%eax)
; X86-NEXT: retl
;
; CORE2-LABEL: memset_5:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: movb $0, 4(%rdi)
; CORE2-NEXT: movl $0, (%rdi)
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_5:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: movb $0, 4(%rdi)
; NEHALEM-NEXT: movl $0, (%rdi)
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 5, i1 false)
ret void
}
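
; 7 bytes: 32-bit, 16-bit, and 8-bit stores.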
define void @memset_7(i8* %a) nounwind {
; X86-LABEL: memset_7:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movb $0, 6(%eax)
; X86-NEXT: movw $0, 4(%eax)
; X86-NEXT: movl $0, (%eax)
; X86-NEXT: retl
;
; CORE2-LABEL: memset_7:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: movb $0, 6(%rdi)
; CORE2-NEXT: movw $0, 4(%rdi)
; CORE2-NEXT: movl $0, (%rdi)
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_7:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: movb $0, 6(%rdi)
; NEHALEM-NEXT: movw $0, 4(%rdi)
; NEHALEM-NEXT: movl $0, (%rdi)
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 7, i1 false)
ret void
}
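
; 8 bytes: one 64-bit store on x86-64, two 32-bit stores on i686.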
define void @memset_8(i8* %a) nounwind {
; X86-LABEL: memset_8:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl $0, 4(%eax)
; X86-NEXT: movl $0, (%eax)
; X86-NEXT: retl
;
; CORE2-LABEL: memset_8:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: movq $0, (%rdi)
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_8:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: movq $0, (%rdi)
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 8, i1 false)
ret void
}
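
; 11 bytes: on x86-64, a 64-bit store plus 16-bit and 8-bit stores.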
define void @memset_11(i8* %a) nounwind {
; X86-LABEL: memset_11:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movb $0, 10(%eax)
; X86-NEXT: movw $0, 8(%eax)
; X86-NEXT: movl $0, 4(%eax)
; X86-NEXT: movl $0, (%eax)
; X86-NEXT: retl
;
; CORE2-LABEL: memset_11:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: movb $0, 10(%rdi)
; CORE2-NEXT: movw $0, 8(%rdi)
; CORE2-NEXT: movq $0, (%rdi)
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_11:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: movb $0, 10(%rdi)
; NEHALEM-NEXT: movw $0, 8(%rdi)
; NEHALEM-NEXT: movq $0, (%rdi)
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 11, i1 false)
ret void
}
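
; 13 bytes: on x86-64, two overlapping 64-bit stores.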
define void @memset_13(i8* %a) nounwind {
; X86-LABEL: memset_13:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movb $0, 12(%eax)
; X86-NEXT: movl $0, 8(%eax)
; X86-NEXT: movl $0, 4(%eax)
; X86-NEXT: movl $0, (%eax)
; X86-NEXT: retl
;
; CORE2-LABEL: memset_13:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: movq $0, 5(%rdi)
; CORE2-NEXT: movq $0, (%rdi)
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_13:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: movq $0, 5(%rdi)
; NEHALEM-NEXT: movq $0, (%rdi)
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 13, i1 false)
ret void
}
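
; 15 bytes: on x86-64, two overlapping 64-bit stores.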
define void @memset_15(i8* %a) nounwind {
; X86-LABEL: memset_15:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movb $0, 14(%eax)
; X86-NEXT: movw $0, 12(%eax)
; X86-NEXT: movl $0, 8(%eax)
; X86-NEXT: movl $0, 4(%eax)
; X86-NEXT: movl $0, (%eax)
; X86-NEXT: retl
;
; CORE2-LABEL: memset_15:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: movq $0, 7(%rdi)
; CORE2-NEXT: movq $0, (%rdi)
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_15:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: movq $0, 7(%rdi)
; NEHALEM-NEXT: movq $0, (%rdi)
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 15, i1 false)
ret void
}
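
; 16 bytes: two 64-bit stores on core2; a single unaligned 16-byte SSE store
; (movups) on nehalem.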
define void @memset_16(i8* %a) nounwind {
; X86-LABEL: memset_16:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl $0, 12(%eax)
; X86-NEXT: movl $0, 8(%eax)
; X86-NEXT: movl $0, 4(%eax)
; X86-NEXT: movl $0, (%eax)
; X86-NEXT: retl
;
; CORE2-LABEL: memset_16:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: movq $0, 8(%rdi)
; CORE2-NEXT: movq $0, (%rdi)
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_16:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: xorps %xmm0, %xmm0
; NEHALEM-NEXT: movups %xmm0, (%rdi)
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 16, i1 false)
ret void
}
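
; 17 bytes: core2 uses two 64-bit stores plus a byte store; nehalem uses a
; 16-byte SSE store plus a byte store.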
define void @memset_17(i8* %a) nounwind {
; X86-LABEL: memset_17:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movb $0, 16(%eax)
; X86-NEXT: movl $0, 12(%eax)
; X86-NEXT: movl $0, 8(%eax)
; X86-NEXT: movl $0, 4(%eax)
; X86-NEXT: movl $0, (%eax)
; X86-NEXT: retl
;
; CORE2-LABEL: memset_17:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: movb $0, 16(%rdi)
; CORE2-NEXT: movq $0, 8(%rdi)
; CORE2-NEXT: movq $0, (%rdi)
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_17:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: xorps %xmm0, %xmm0
; NEHALEM-NEXT: movups %xmm0, (%rdi)
; NEHALEM-NEXT: movb $0, 16(%rdi)
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 17, i1 false)
ret void
}
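
; 19 bytes: nehalem uses a 16-byte SSE store plus 16-bit and 8-bit stores.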
define void @memset_19(i8* %a) nounwind {
; X86-LABEL: memset_19:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movb $0, 18(%eax)
; X86-NEXT: movw $0, 16(%eax)
; X86-NEXT: movl $0, 12(%eax)
; X86-NEXT: movl $0, 8(%eax)
; X86-NEXT: movl $0, 4(%eax)
; X86-NEXT: movl $0, (%eax)
; X86-NEXT: retl
;
; CORE2-LABEL: memset_19:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: movb $0, 18(%rdi)
; CORE2-NEXT: movw $0, 16(%rdi)
; CORE2-NEXT: movq $0, 8(%rdi)
; CORE2-NEXT: movq $0, (%rdi)
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_19:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: xorps %xmm0, %xmm0
; NEHALEM-NEXT: movups %xmm0, (%rdi)
; NEHALEM-NEXT: movb $0, 18(%rdi)
; NEHALEM-NEXT: movw $0, 16(%rdi)
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 19, i1 false)
ret void
}
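
; 31 bytes: core2 uses four 64-bit stores (the top one overlapping); nehalem
; uses two overlapping 16-byte SSE stores.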
define void @memset_31(i8* %a) nounwind {
; X86-LABEL: memset_31:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movb $0, 30(%eax)
; X86-NEXT: movw $0, 28(%eax)
; X86-NEXT: movl $0, 24(%eax)
; X86-NEXT: movl $0, 20(%eax)
; X86-NEXT: movl $0, 16(%eax)
; X86-NEXT: movl $0, 12(%eax)
; X86-NEXT: movl $0, 8(%eax)
; X86-NEXT: movl $0, 4(%eax)
; X86-NEXT: movl $0, (%eax)
; X86-NEXT: retl
;
; CORE2-LABEL: memset_31:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: movq $0, 23(%rdi)
; CORE2-NEXT: movq $0, 16(%rdi)
; CORE2-NEXT: movq $0, 8(%rdi)
; CORE2-NEXT: movq $0, (%rdi)
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_31:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: xorps %xmm0, %xmm0
; NEHALEM-NEXT: movups %xmm0, 15(%rdi)
; NEHALEM-NEXT: movups %xmm0, (%rdi)
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 31, i1 false)
ret void
}
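
; 35 bytes: nehalem uses two 16-byte SSE stores plus 16-bit and 8-bit stores.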
define void @memset_35(i8* %a) nounwind {
; X86-LABEL: memset_35:
; X86: # %bb.0: # %entry
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movb $0, 34(%eax)
; X86-NEXT: movw $0, 32(%eax)
; X86-NEXT: movl $0, 28(%eax)
; X86-NEXT: movl $0, 24(%eax)
; X86-NEXT: movl $0, 20(%eax)
; X86-NEXT: movl $0, 16(%eax)
; X86-NEXT: movl $0, 12(%eax)
; X86-NEXT: movl $0, 8(%eax)
; X86-NEXT: movl $0, 4(%eax)
; X86-NEXT: movl $0, (%eax)
; X86-NEXT: retl
;
; CORE2-LABEL: memset_35:
; CORE2: # %bb.0: # %entry
; CORE2-NEXT: movb $0, 34(%rdi)
; CORE2-NEXT: movw $0, 32(%rdi)
; CORE2-NEXT: movq $0, 24(%rdi)
; CORE2-NEXT: movq $0, 16(%rdi)
; CORE2-NEXT: movq $0, 8(%rdi)
; CORE2-NEXT: movq $0, (%rdi)
; CORE2-NEXT: retq
;
; NEHALEM-LABEL: memset_35:
; NEHALEM: # %bb.0: # %entry
; NEHALEM-NEXT: xorps %xmm0, %xmm0
; NEHALEM-NEXT: movups %xmm0, 16(%rdi)
; NEHALEM-NEXT: movups %xmm0, (%rdi)
; NEHALEM-NEXT: movb $0, 34(%rdi)
; NEHALEM-NEXT: movw $0, 32(%rdi)
; NEHALEM-NEXT: retq
entry:
call void @llvm.memset.p0i8.i64(i8* %a, i8 0, i64 35, i1 false)
ret void
}