Sema: produce error when invalid ordering is passed to atomic builtin

This is a conservative check: the ordering expression is allowed to be
non-constant, and in that case we simply cannot tell at compile time whether it is valid.
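
For illustration only (this snippet is not part of the patch, and the names
example/order are made up), the intended behaviour is roughly:

    #include <stdatomic.h>

    void example(_Atomic(int) *p, int order) {
      // Constant but invalid for a load: now rejected with
      // "memory order argument to atomic operation is invalid".
      __c11_atomic_load(p, memory_order_release);
      // Non-constant ordering: not diagnosed, since it may be valid at run time.
      __c11_atomic_load(p, order);
    }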

rdar://problem/16242991

llvm-svn: 203561
Tim Northover 2014-03-11 10:49:14 +00:00
parent 0622b3a67a
commit e94a34cae2
7 changed files with 282 additions and 40 deletions


@@ -4728,6 +4728,16 @@ public:
BI_First = 0
};
// The ABI values for various atomic memory orderings.
enum AtomicOrderingKind {
AO_ABI_memory_order_relaxed = 0,
AO_ABI_memory_order_consume = 1,
AO_ABI_memory_order_acquire = 2,
AO_ABI_memory_order_release = 3,
AO_ABI_memory_order_acq_rel = 4,
AO_ABI_memory_order_seq_cst = 5
};
private:
enum { PTR, ORDER, VAL1, ORDER_FAIL, VAL2, WEAK, END_EXPR };
Stmt* SubExprs[END_EXPR];


@@ -5689,7 +5689,9 @@ def err_atomic_op_needs_atomic_int_or_ptr : Error<
def err_atomic_op_bitwise_needs_atomic_int : Error<
"address argument to bitwise atomic operation must be a pointer to "
"%select{|atomic }0integer (%1 invalid)">;
def err_atomic_op_has_invalid_memory_order : Error<
"memory order argument to atomic operation is invalid">;
def err_atomic_load_store_uses_lib : Error<
"atomic %select{load|store}0 requires runtime support that is not "
"available for this target">;


@@ -24,16 +24,6 @@
using namespace clang;
using namespace CodeGen;
// The ABI values for various atomic memory orderings.
enum AtomicOrderingKind {
AO_ABI_memory_order_relaxed = 0,
AO_ABI_memory_order_consume = 1,
AO_ABI_memory_order_acquire = 2,
AO_ABI_memory_order_release = 3,
AO_ABI_memory_order_acq_rel = 4,
AO_ABI_memory_order_seq_cst = 5
};
namespace {
class AtomicInfo {
CodeGenFunction &CGF;
@@ -642,30 +632,30 @@ RValue CodeGenFunction::EmitAtomicExpr(AtomicExpr *E, llvm::Value *Dest) {
if (isa<llvm::ConstantInt>(Order)) {
int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
switch (ord) {
case AO_ABI_memory_order_relaxed:
case AtomicExpr::AO_ABI_memory_order_relaxed:
EmitAtomicOp(*this, E, Dest, Ptr, Val1, Val2, Size, Align,
llvm::Monotonic);
break;
case AO_ABI_memory_order_consume:
case AO_ABI_memory_order_acquire:
case AtomicExpr::AO_ABI_memory_order_consume:
case AtomicExpr::AO_ABI_memory_order_acquire:
if (IsStore)
break; // Avoid crashing on code with undefined behavior
EmitAtomicOp(*this, E, Dest, Ptr, Val1, Val2, Size, Align,
llvm::Acquire);
break;
case AO_ABI_memory_order_release:
case AtomicExpr::AO_ABI_memory_order_release:
if (IsLoad)
break; // Avoid crashing on code with undefined behavior
EmitAtomicOp(*this, E, Dest, Ptr, Val1, Val2, Size, Align,
llvm::Release);
break;
case AO_ABI_memory_order_acq_rel:
case AtomicExpr::AO_ABI_memory_order_acq_rel:
if (IsLoad || IsStore)
break; // Avoid crashing on code with undefined behavior
EmitAtomicOp(*this, E, Dest, Ptr, Val1, Val2, Size, Align,
llvm::AcquireRelease);
break;
case AO_ABI_memory_order_seq_cst:
case AtomicExpr::AO_ABI_memory_order_seq_cst:
EmitAtomicOp(*this, E, Dest, Ptr, Val1, Val2, Size, Align,
llvm::SequentiallyConsistent);
break;
@@ -788,8 +778,8 @@ RValue CodeGenFunction::EmitAtomicLoad(LValue src, SourceLocation loc,
getContext().VoidPtrTy);
args.add(RValue::get(EmitCastToVoidPtr(tempAddr)),
getContext().VoidPtrTy);
args.add(RValue::get(llvm::ConstantInt::get(IntTy,
AO_ABI_memory_order_seq_cst)),
args.add(RValue::get(llvm::ConstantInt::get(
IntTy, AtomicExpr::AO_ABI_memory_order_seq_cst)),
getContext().IntTy);
emitAtomicLibcall(*this, "__atomic_load", getContext().VoidTy, args);
@@ -938,8 +928,8 @@ void CodeGenFunction::EmitAtomicStore(RValue rvalue, LValue dest, bool isInit) {
getContext().VoidPtrTy);
args.add(RValue::get(EmitCastToVoidPtr(srcAddr)),
getContext().VoidPtrTy);
args.add(RValue::get(llvm::ConstantInt::get(IntTy,
AO_ABI_memory_order_seq_cst)),
args.add(RValue::get(llvm::ConstantInt::get(
IntTy, AtomicExpr::AO_ABI_memory_order_seq_cst)),
getContext().IntTy);
emitAtomicLibcall(*this, "__atomic_store", getContext().VoidTy, args);
return;


@@ -911,6 +911,33 @@ bool Sema::CheckOtherCall(CallExpr *TheCall, const FunctionProtoType *Proto) {
return false;
}
static bool isValidOrderingForOp(int64_t Ordering, AtomicExpr::AtomicOp Op) {
if (Ordering < AtomicExpr::AO_ABI_memory_order_relaxed ||
Ordering > AtomicExpr::AO_ABI_memory_order_seq_cst)
return false;
switch (Op) {
case AtomicExpr::AO__c11_atomic_init:
llvm_unreachable("There is no ordering argument for an init");
case AtomicExpr::AO__c11_atomic_load:
case AtomicExpr::AO__atomic_load_n:
case AtomicExpr::AO__atomic_load:
return Ordering != AtomicExpr::AO_ABI_memory_order_release &&
Ordering != AtomicExpr::AO_ABI_memory_order_acq_rel;
case AtomicExpr::AO__c11_atomic_store:
case AtomicExpr::AO__atomic_store:
case AtomicExpr::AO__atomic_store_n:
return Ordering != AtomicExpr::AO_ABI_memory_order_consume &&
Ordering != AtomicExpr::AO_ABI_memory_order_acquire &&
Ordering != AtomicExpr::AO_ABI_memory_order_acq_rel;
default:
return true;
}
}
ExprResult Sema::SemaAtomicOpsOverloaded(ExprResult TheCallResult,
AtomicExpr::AtomicOp Op) {
CallExpr *TheCall = cast<CallExpr>(TheCallResult.get());
@@ -1199,7 +1226,16 @@ ExprResult Sema::SemaAtomicOpsOverloaded(ExprResult TheCallResult,
SubExprs.push_back(TheCall->getArg(3)); // Weak
break;
}
if (SubExprs.size() >= 2 && Form != Init) {
llvm::APSInt Result(32);
if (SubExprs[1]->isIntegerConstantExpr(Result, Context) &&
!isValidOrderingForOp(Result.getSExtValue(), Op))
return ExprError(Diag(SubExprs[1]->getLocStart(),
diag::err_atomic_op_has_invalid_memory_order)
<< SubExprs[1]->getSourceRange());
}
AtomicExpr *AE = new (Context) AtomicExpr(TheCall->getCallee()->getLocStart(),
SubExprs, ResultType, Op,
TheCall->getRParenLoc());


@@ -315,13 +315,4 @@ void atomic_init_foo()
// CHECK: }
}
// CHECK: @invalid_atomic
void invalid_atomic(_Atomic(int) *i) {
__c11_atomic_store(i, 1, memory_order_consume);
__c11_atomic_store(i, 1, memory_order_acquire);
__c11_atomic_store(i, 1, memory_order_acq_rel);
__c11_atomic_load(i, memory_order_release);
__c11_atomic_load(i, memory_order_acq_rel);
}
#endif


@@ -311,13 +311,4 @@ void atomic_init_foo()
// CHECK: }
}
// CHECK: @invalid_atomic
void invalid_atomic(_Atomic(int) *i) {
__c11_atomic_store(i, 1, memory_order_consume);
__c11_atomic_store(i, 1, memory_order_acquire);
__c11_atomic_store(i, 1, memory_order_acq_rel);
__c11_atomic_load(i, memory_order_release);
__c11_atomic_load(i, memory_order_acq_rel);
}
#endif


@@ -182,3 +182,225 @@ void PR16931(int* x) { // expected-note {{passing argument to parameter 'x' here
flag flagvar = { 0 };
PR16931(&flagvar); // expected-warning {{incompatible pointer types}}
}
void memory_checks(_Atomic(int) *Ap, int *p, int val) {
(void)__c11_atomic_load(Ap, memory_order_relaxed);
(void)__c11_atomic_load(Ap, memory_order_acquire);
(void)__c11_atomic_load(Ap, memory_order_consume);
(void)__c11_atomic_load(Ap, memory_order_release); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__c11_atomic_load(Ap, memory_order_acq_rel); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__c11_atomic_load(Ap, memory_order_seq_cst);
(void)__c11_atomic_load(Ap, val);
(void)__c11_atomic_load(Ap, -1); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__c11_atomic_load(Ap, 42); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__c11_atomic_store(Ap, val, memory_order_relaxed);
(void)__c11_atomic_store(Ap, val, memory_order_acquire); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__c11_atomic_store(Ap, val, memory_order_consume); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__c11_atomic_store(Ap, val, memory_order_release);
(void)__c11_atomic_store(Ap, val, memory_order_acq_rel); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__c11_atomic_store(Ap, val, memory_order_seq_cst);
(void)__c11_atomic_fetch_add(Ap, 1, memory_order_relaxed);
(void)__c11_atomic_fetch_add(Ap, 1, memory_order_acquire);
(void)__c11_atomic_fetch_add(Ap, 1, memory_order_consume);
(void)__c11_atomic_fetch_add(Ap, 1, memory_order_release);
(void)__c11_atomic_fetch_add(Ap, 1, memory_order_acq_rel);
(void)__c11_atomic_fetch_add(Ap, 1, memory_order_seq_cst);
(void)__c11_atomic_init(Ap, val);
(void)__c11_atomic_init(Ap, val);
(void)__c11_atomic_init(Ap, val);
(void)__c11_atomic_init(Ap, val);
(void)__c11_atomic_init(Ap, val);
(void)__c11_atomic_init(Ap, val);
(void)__c11_atomic_fetch_sub(Ap, val, memory_order_relaxed);
(void)__c11_atomic_fetch_sub(Ap, val, memory_order_acquire);
(void)__c11_atomic_fetch_sub(Ap, val, memory_order_consume);
(void)__c11_atomic_fetch_sub(Ap, val, memory_order_release);
(void)__c11_atomic_fetch_sub(Ap, val, memory_order_acq_rel);
(void)__c11_atomic_fetch_sub(Ap, val, memory_order_seq_cst);
(void)__c11_atomic_fetch_and(Ap, val, memory_order_relaxed);
(void)__c11_atomic_fetch_and(Ap, val, memory_order_acquire);
(void)__c11_atomic_fetch_and(Ap, val, memory_order_consume);
(void)__c11_atomic_fetch_and(Ap, val, memory_order_release);
(void)__c11_atomic_fetch_and(Ap, val, memory_order_acq_rel);
(void)__c11_atomic_fetch_and(Ap, val, memory_order_seq_cst);
(void)__c11_atomic_fetch_or(Ap, val, memory_order_relaxed);
(void)__c11_atomic_fetch_or(Ap, val, memory_order_acquire);
(void)__c11_atomic_fetch_or(Ap, val, memory_order_consume);
(void)__c11_atomic_fetch_or(Ap, val, memory_order_release);
(void)__c11_atomic_fetch_or(Ap, val, memory_order_acq_rel);
(void)__c11_atomic_fetch_or(Ap, val, memory_order_seq_cst);
(void)__c11_atomic_fetch_xor(Ap, val, memory_order_relaxed);
(void)__c11_atomic_fetch_xor(Ap, val, memory_order_acquire);
(void)__c11_atomic_fetch_xor(Ap, val, memory_order_consume);
(void)__c11_atomic_fetch_xor(Ap, val, memory_order_release);
(void)__c11_atomic_fetch_xor(Ap, val, memory_order_acq_rel);
(void)__c11_atomic_fetch_xor(Ap, val, memory_order_seq_cst);
(void)__c11_atomic_exchange(Ap, val, memory_order_relaxed);
(void)__c11_atomic_exchange(Ap, val, memory_order_acquire);
(void)__c11_atomic_exchange(Ap, val, memory_order_consume);
(void)__c11_atomic_exchange(Ap, val, memory_order_release);
(void)__c11_atomic_exchange(Ap, val, memory_order_acq_rel);
(void)__c11_atomic_exchange(Ap, val, memory_order_seq_cst);
(void)__c11_atomic_compare_exchange_strong(Ap, p, val, memory_order_relaxed, memory_order_relaxed);
(void)__c11_atomic_compare_exchange_strong(Ap, p, val, memory_order_acquire, memory_order_relaxed);
(void)__c11_atomic_compare_exchange_strong(Ap, p, val, memory_order_consume, memory_order_relaxed);
(void)__c11_atomic_compare_exchange_strong(Ap, p, val, memory_order_release, memory_order_relaxed);
(void)__c11_atomic_compare_exchange_strong(Ap, p, val, memory_order_acq_rel, memory_order_relaxed);
(void)__c11_atomic_compare_exchange_strong(Ap, p, val, memory_order_seq_cst, memory_order_relaxed);
(void)__c11_atomic_compare_exchange_weak(Ap, p, val, memory_order_relaxed, memory_order_relaxed);
(void)__c11_atomic_compare_exchange_weak(Ap, p, val, memory_order_acquire, memory_order_relaxed);
(void)__c11_atomic_compare_exchange_weak(Ap, p, val, memory_order_consume, memory_order_relaxed);
(void)__c11_atomic_compare_exchange_weak(Ap, p, val, memory_order_release, memory_order_relaxed);
(void)__c11_atomic_compare_exchange_weak(Ap, p, val, memory_order_acq_rel, memory_order_relaxed);
(void)__c11_atomic_compare_exchange_weak(Ap, p, val, memory_order_seq_cst, memory_order_relaxed);
(void)__atomic_load_n(p, memory_order_relaxed);
(void)__atomic_load_n(p, memory_order_acquire);
(void)__atomic_load_n(p, memory_order_consume);
(void)__atomic_load_n(p, memory_order_release); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__atomic_load_n(p, memory_order_acq_rel); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__atomic_load_n(p, memory_order_seq_cst);
(void)__atomic_load(p, p, memory_order_relaxed);
(void)__atomic_load(p, p, memory_order_acquire);
(void)__atomic_load(p, p, memory_order_consume);
(void)__atomic_load(p, p, memory_order_release); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__atomic_load(p, p, memory_order_acq_rel); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__atomic_load(p, p, memory_order_seq_cst);
(void)__atomic_store(p, p, memory_order_relaxed);
(void)__atomic_store(p, p, memory_order_acquire); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__atomic_store(p, p, memory_order_consume); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__atomic_store(p, p, memory_order_release);
(void)__atomic_store(p, p, memory_order_acq_rel); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__atomic_store(p, p, memory_order_seq_cst);
(void)__atomic_store_n(p, val, memory_order_relaxed);
(void)__atomic_store_n(p, val, memory_order_acquire); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__atomic_store_n(p, val, memory_order_consume); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__atomic_store_n(p, val, memory_order_release);
(void)__atomic_store_n(p, val, memory_order_acq_rel); // expected-error {{memory order argument to atomic operation is invalid}}
(void)__atomic_store_n(p, val, memory_order_seq_cst);
(void)__atomic_fetch_add(p, val, memory_order_relaxed);
(void)__atomic_fetch_add(p, val, memory_order_acquire);
(void)__atomic_fetch_add(p, val, memory_order_consume);
(void)__atomic_fetch_add(p, val, memory_order_release);
(void)__atomic_fetch_add(p, val, memory_order_acq_rel);
(void)__atomic_fetch_add(p, val, memory_order_seq_cst);
(void)__atomic_fetch_sub(p, val, memory_order_relaxed);
(void)__atomic_fetch_sub(p, val, memory_order_acquire);
(void)__atomic_fetch_sub(p, val, memory_order_consume);
(void)__atomic_fetch_sub(p, val, memory_order_release);
(void)__atomic_fetch_sub(p, val, memory_order_acq_rel);
(void)__atomic_fetch_sub(p, val, memory_order_seq_cst);
(void)__atomic_add_fetch(p, val, memory_order_relaxed);
(void)__atomic_add_fetch(p, val, memory_order_acquire);
(void)__atomic_add_fetch(p, val, memory_order_consume);
(void)__atomic_add_fetch(p, val, memory_order_release);
(void)__atomic_add_fetch(p, val, memory_order_acq_rel);
(void)__atomic_add_fetch(p, val, memory_order_seq_cst);
(void)__atomic_sub_fetch(p, val, memory_order_relaxed);
(void)__atomic_sub_fetch(p, val, memory_order_acquire);
(void)__atomic_sub_fetch(p, val, memory_order_consume);
(void)__atomic_sub_fetch(p, val, memory_order_release);
(void)__atomic_sub_fetch(p, val, memory_order_acq_rel);
(void)__atomic_sub_fetch(p, val, memory_order_seq_cst);
(void)__atomic_fetch_and(p, val, memory_order_relaxed);
(void)__atomic_fetch_and(p, val, memory_order_acquire);
(void)__atomic_fetch_and(p, val, memory_order_consume);
(void)__atomic_fetch_and(p, val, memory_order_release);
(void)__atomic_fetch_and(p, val, memory_order_acq_rel);
(void)__atomic_fetch_and(p, val, memory_order_seq_cst);
(void)__atomic_fetch_or(p, val, memory_order_relaxed);
(void)__atomic_fetch_or(p, val, memory_order_acquire);
(void)__atomic_fetch_or(p, val, memory_order_consume);
(void)__atomic_fetch_or(p, val, memory_order_release);
(void)__atomic_fetch_or(p, val, memory_order_acq_rel);
(void)__atomic_fetch_or(p, val, memory_order_seq_cst);
(void)__atomic_fetch_xor(p, val, memory_order_relaxed);
(void)__atomic_fetch_xor(p, val, memory_order_acquire);
(void)__atomic_fetch_xor(p, val, memory_order_consume);
(void)__atomic_fetch_xor(p, val, memory_order_release);
(void)__atomic_fetch_xor(p, val, memory_order_acq_rel);
(void)__atomic_fetch_xor(p, val, memory_order_seq_cst);
(void)__atomic_fetch_nand(p, val, memory_order_relaxed);
(void)__atomic_fetch_nand(p, val, memory_order_acquire);
(void)__atomic_fetch_nand(p, val, memory_order_consume);
(void)__atomic_fetch_nand(p, val, memory_order_release);
(void)__atomic_fetch_nand(p, val, memory_order_acq_rel);
(void)__atomic_fetch_nand(p, val, memory_order_seq_cst);
(void)__atomic_and_fetch(p, val, memory_order_relaxed);
(void)__atomic_and_fetch(p, val, memory_order_acquire);
(void)__atomic_and_fetch(p, val, memory_order_consume);
(void)__atomic_and_fetch(p, val, memory_order_release);
(void)__atomic_and_fetch(p, val, memory_order_acq_rel);
(void)__atomic_and_fetch(p, val, memory_order_seq_cst);
(void)__atomic_or_fetch(p, val, memory_order_relaxed);
(void)__atomic_or_fetch(p, val, memory_order_acquire);
(void)__atomic_or_fetch(p, val, memory_order_consume);
(void)__atomic_or_fetch(p, val, memory_order_release);
(void)__atomic_or_fetch(p, val, memory_order_acq_rel);
(void)__atomic_or_fetch(p, val, memory_order_seq_cst);
(void)__atomic_xor_fetch(p, val, memory_order_relaxed);
(void)__atomic_xor_fetch(p, val, memory_order_acquire);
(void)__atomic_xor_fetch(p, val, memory_order_consume);
(void)__atomic_xor_fetch(p, val, memory_order_release);
(void)__atomic_xor_fetch(p, val, memory_order_acq_rel);
(void)__atomic_xor_fetch(p, val, memory_order_seq_cst);
(void)__atomic_nand_fetch(p, val, memory_order_relaxed);
(void)__atomic_nand_fetch(p, val, memory_order_acquire);
(void)__atomic_nand_fetch(p, val, memory_order_consume);
(void)__atomic_nand_fetch(p, val, memory_order_release);
(void)__atomic_nand_fetch(p, val, memory_order_acq_rel);
(void)__atomic_nand_fetch(p, val, memory_order_seq_cst);
(void)__atomic_exchange_n(p, val, memory_order_relaxed);
(void)__atomic_exchange_n(p, val, memory_order_acquire);
(void)__atomic_exchange_n(p, val, memory_order_consume);
(void)__atomic_exchange_n(p, val, memory_order_release);
(void)__atomic_exchange_n(p, val, memory_order_acq_rel);
(void)__atomic_exchange_n(p, val, memory_order_seq_cst);
(void)__atomic_exchange(p, p, p, memory_order_relaxed);
(void)__atomic_exchange(p, p, p, memory_order_acquire);
(void)__atomic_exchange(p, p, p, memory_order_consume);
(void)__atomic_exchange(p, p, p, memory_order_release);
(void)__atomic_exchange(p, p, p, memory_order_acq_rel);
(void)__atomic_exchange(p, p, p, memory_order_seq_cst);
(void)__atomic_compare_exchange(p, p, p, 0, memory_order_relaxed, memory_order_relaxed);
(void)__atomic_compare_exchange(p, p, p, 0, memory_order_acquire, memory_order_relaxed);
(void)__atomic_compare_exchange(p, p, p, 0, memory_order_consume, memory_order_relaxed);
(void)__atomic_compare_exchange(p, p, p, 0, memory_order_release, memory_order_relaxed);
(void)__atomic_compare_exchange(p, p, p, 0, memory_order_acq_rel, memory_order_relaxed);
(void)__atomic_compare_exchange(p, p, p, 0, memory_order_seq_cst, memory_order_relaxed);
(void)__atomic_compare_exchange_n(p, p, val, 0, memory_order_relaxed, memory_order_relaxed);
(void)__atomic_compare_exchange_n(p, p, val, 0, memory_order_acquire, memory_order_relaxed);
(void)__atomic_compare_exchange_n(p, p, val, 0, memory_order_consume, memory_order_relaxed);
(void)__atomic_compare_exchange_n(p, p, val, 0, memory_order_release, memory_order_relaxed);
(void)__atomic_compare_exchange_n(p, p, val, 0, memory_order_acq_rel, memory_order_relaxed);
(void)__atomic_compare_exchange_n(p, p, val, 0, memory_order_seq_cst, memory_order_relaxed);
}