[Verifier] Support masked load/store with opaque pointers

Nikita Popov 2021-06-26 18:11:59 +02:00
parent b62de20190
commit 8c2d4621d9
2 changed files with 57 additions and 27 deletions
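With this change the verifier takes the data type for llvm.masked.load / llvm.masked.store from the return value (load) or the stored value (store), and only requires the pointer operand to be compatible via isOpaqueOrPointeeTypeMatches, so an opaque `ptr` operand is accepted. A minimal IR sketch of what now verifies, distilled from the test added below (function and value names are illustrative):

declare <2 x i32> @llvm.masked.load.v2i32.p0(ptr, i32, <2 x i1>, <2 x i32>)
declare void @llvm.masked.store.v2i32.p0(<2 x i32>, ptr, i32, <2 x i1>)

define <2 x i32> @masked_roundtrip(ptr %p, <2 x i1> %mask, <2 x i32> %passthru) {
  ; The pointee type is opaque, so the mask length and pass-through type are
  ; checked against the <2 x i32> return/value type rather than a pointee type.
  %v = call <2 x i32> @llvm.masked.load.v2i32.p0(ptr %p, i32 4, <2 x i1> %mask, <2 x i32> %passthru)
  call void @llvm.masked.store.v2i32.p0(<2 x i32> %v, ptr %p, i32 4, <2 x i1> %mask)
  ret <2 x i32> %v
}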

llvm/lib/IR/Verifier.cpp

@@ -5066,15 +5066,14 @@ void Verifier::visitIntrinsicCall(Intrinsic::ID ID, CallBase &Call) {
     Assert(Alignment->getValue().isPowerOf2(),
            "masked_load: alignment must be a power of 2", Call);
-    // DataTy is the overloaded type
-    Type *DataTy = cast<PointerType>(Ptr->getType())->getElementType();
-    Assert(DataTy == Call.getType(),
+    PointerType *PtrTy = cast<PointerType>(Ptr->getType());
+    Assert(PtrTy->isOpaqueOrPointeeTypeMatches(Call.getType()),
            "masked_load: return must match pointer type", Call);
-    Assert(PassThru->getType() == DataTy,
-           "masked_load: pass through and data type must match", Call);
+    Assert(PassThru->getType() == Call.getType(),
+           "masked_load: pass through and return type must match", Call);
     Assert(cast<VectorType>(Mask->getType())->getElementCount() ==
-               cast<VectorType>(DataTy)->getElementCount(),
-           "masked_load: vector mask must be same length as data", Call);
+               cast<VectorType>(Call.getType())->getElementCount(),
+           "masked_load: vector mask must be same length as return", Call);
     break;
   }
   case Intrinsic::masked_store: {
@@ -5087,13 +5086,12 @@ void Verifier::visitIntrinsicCall(Intrinsic::ID ID, CallBase &Call) {
     Assert(Alignment->getValue().isPowerOf2(),
            "masked_store: alignment must be a power of 2", Call);
-    // DataTy is the overloaded type
-    Type *DataTy = cast<PointerType>(Ptr->getType())->getElementType();
-    Assert(DataTy == Val->getType(),
+    PointerType *PtrTy = cast<PointerType>(Ptr->getType());
+    Assert(PtrTy->isOpaqueOrPointeeTypeMatches(Val->getType()),
            "masked_store: storee must match pointer type", Call);
     Assert(cast<VectorType>(Mask->getType())->getElementCount() ==
-               cast<VectorType>(DataTy)->getElementCount(),
-           "masked_store: vector mask must be same length as data", Call);
+               cast<VectorType>(Val->getType())->getElementCount(),
+           "masked_store: vector mask must be same length as value", Call);
     break;
   }
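For contrast, the updated assertions continue to accept the typed-pointer form, where isOpaqueOrPointeeTypeMatches additionally requires the pointee type to equal the data type. A sketch, not part of this commit, assuming typed pointers are still enabled and using the pointer-mangled intrinsic name of that era:

declare <2 x i32> @llvm.masked.load.v2i32.p0v2i32(<2 x i32>*, i32, <2 x i1>, <2 x i32>)

define <2 x i32> @typed_ptr_load(<2 x i32>* %p, <2 x i1> %mask, <2 x i32> %passthru) {
  ; Pointee type <2 x i32> matches the return type, so the check still passes.
  %v = call <2 x i32> @llvm.masked.load.v2i32.p0v2i32(<2 x i32>* %p, i32 4, <2 x i1> %mask, <2 x i32> %passthru)
  ret <2 x i32> %v
}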

llvm/test/Verifier/opaque-ptr.ll

@@ -1,36 +1,68 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
 ; RUN: opt -passes=verify -S < %s | FileCheck %s
 
-; CHECK: @load
 define i32 @load(ptr %a) {
+; CHECK-LABEL: @load(
+; CHECK-NEXT: [[I:%.*]] = load i32, ptr [[A:%.*]], align 4
+; CHECK-NEXT: ret i32 [[I]]
+;
   %i = load i32, ptr %a
   ret i32 %i
 }
 
-; CHECK: @store
 define void @store(ptr %a, i32 %i) {
+; CHECK-LABEL: @store(
+; CHECK-NEXT: store i32 [[I:%.*]], ptr [[A:%.*]], align 4
+; CHECK-NEXT: ret void
+;
   store i32 %i, ptr %a
   ret void
 }
 
-; CHECK: @cmpxchg
 define void @cmpxchg(ptr %p, i32 %a, i32 %b) {
+; CHECK-LABEL: @cmpxchg(
+; CHECK-NEXT: [[VAL_SUCCESS:%.*]] = cmpxchg ptr [[P:%.*]], i32 [[A:%.*]], i32 [[B:%.*]] acq_rel monotonic, align 4
+; CHECK-NEXT: ret void
+;
   %val_success = cmpxchg ptr %p, i32 %a, i32 %b acq_rel monotonic
   ret void
 }
 
-; CHECK: @atomicrmw
 define void @atomicrmw(ptr %a, i32 %i) {
+; CHECK-LABEL: @atomicrmw(
+; CHECK-NEXT: [[B:%.*]] = atomicrmw add ptr [[A:%.*]], i32 [[I:%.*]] acquire, align 4
+; CHECK-NEXT: ret void
+;
   %b = atomicrmw add ptr %a, i32 %i acquire
   ret void
 }
 
 define void @opaque_mangle(ptr %a) {
+; CHECK-LABEL: @opaque_mangle(
+; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 8, ptr [[A:%.*]])
+; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 8, ptr [[A]])
+; CHECK-NEXT: ret void
+;
   call void @llvm.lifetime.start.p0(i64 8, ptr %a)
   call void @llvm.lifetime.end.p0(i64 8, ptr %a)
   ret void
 }
 
+define void @intrinsic_calls(ptr %a) {
+; CHECK-LABEL: @intrinsic_calls(
+; CHECK-NEXT: [[TMP1:%.*]] = call <2 x i32> @llvm.masked.load.v2i32.p0(ptr [[A:%.*]], i32 4, <2 x i1> zeroinitializer, <2 x i32> zeroinitializer)
+; CHECK-NEXT: call void @llvm.masked.store.v2i32.p0(<2 x i32> zeroinitializer, ptr [[A]], i32 4, <2 x i1> zeroinitializer)
+; CHECK-NEXT: ret void
+;
+  call <2 x i32> @llvm.masked.load.v2i32.p0(ptr %a, i32 4, <2 x i1> zeroinitializer, <2 x i32> zeroinitializer)
+  call void @llvm.masked.store.v2i32.p0(<2 x i32> zeroinitializer, ptr %a, i32 4, <2 x i1> zeroinitializer)
+  ret void
+}
+
 ; CHECK: @llvm.lifetime.start.p0
 ; CHECK: @llvm.lifetime.end.p0
 declare void @llvm.lifetime.start.p0(i64, ptr nocapture)
 declare void @llvm.lifetime.end.p0(i64, ptr nocapture)
+
+declare <2 x i32> @llvm.masked.load.v2i32.p0(ptr, i32, <2 x i1>, <2 x i32>)
+declare void @llvm.masked.store.v2i32.p0(<2 x i32>, ptr, i32, <2 x i1>)