[MS Volatile] Don't make volatile loads/stores to underaligned objects atomic

Underaligned atomic LValues require libcalls, which MSVC doesn't have.
MSVC doesn't seem to consider such operations as requiring a barrier
anyway.

This fixes PR27843.

llvm-svn: 270576
Author: David Majnemer
Date:   2016-05-24 16:09:25 +00:00
commit a38c9f1fa5 (parent ef88dc8fe4)
4 changed files with 23 additions and 32 deletions
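
For concreteness, the shape of the PR27843 reproducer matches the test12 case added below: under /volatile:ms, a volatile field whose address is only 1-aligned used to be routed through the atomic path and ended up as an __atomic_load libcall that MSVC environments don't provide. A minimal standalone sketch follows; the build flags are assumptions (/volatile:ms is the clang-cl spelling, -fms-volatile the cc1 one):

#pragma pack(push)
#pragma pack(1)           /* drops struct qux (and f) to 1-byte alignment */
struct qux {
  volatile int f;
};
#pragma pack(pop)

int read_f(struct qux *p) {
  /* Before this patch: the volatile read took the atomic path, and the
     1-aligned address forced a call to the __atomic_load libcall.
     After: a plain "load volatile i32", as the new CHECK lines expect. */
  return p->f;
}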

lib/CodeGen/CGAtomic.cpp

@@ -1274,31 +1274,11 @@ bool CodeGenFunction::LValueIsSuitableForInlineAtomic(LValue LV) {
   bool IsVolatile = LV.isVolatile() || hasVolatileMember(LV.getType());
   // An atomic is inline if we don't need to use a libcall.
   bool AtomicIsInline = !AI.shouldUseLibcall();
-  return IsVolatile && AtomicIsInline;
-}
-
-/// An type is a candidate for having its loads and stores be made atomic if
-/// we are operating under /volatile:ms *and* we know the access is volatile and
-/// performing such an operation can be performed without a libcall.
-bool CodeGenFunction::typeIsSuitableForInlineAtomic(QualType Ty,
-                                                    bool IsVolatile) const {
-  // The operation must be volatile for us to make it atomic.
-  if (!IsVolatile)
-    return false;
-  // The -fms-volatile flag must be passed for us to adopt this behavior.
-  if (!CGM.getCodeGenOpts().MSVolatile)
-    return false;
-
-  // An atomic is inline if we don't need to use a libcall (e.g. it is builtin).
-  if (!getContext().getTargetInfo().hasBuiltinAtomic(
-          getContext().getTypeSize(Ty), getContext().getTypeAlign(Ty)))
-    return false;
-
   // MSVC doesn't seem to do this for types wider than a pointer.
-  if (getContext().getTypeSize(Ty) >
+  if (getContext().getTypeSize(LV.getType()) >
       getContext().getTypeSize(getContext().getIntPtrType()))
     return false;
-  return true;
+  return IsVolatile && AtomicIsInline;
 }
 
 RValue CodeGenFunction::EmitAtomicLoad(LValue LV, SourceLocation SL,
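
With the helper gone, suitability is decided entirely by LValueIsSuitableForInlineAtomic: AtomicInfo::shouldUseLibcall() already accounts for the lvalue's actual alignment, so an underaligned access is simply "not inline" and keeps its plain volatile form, while the pointer-width rule survives as the explicit check above. A sketch of what that rule means at the source level, assuming a 32-bit Windows target under /volatile:ms (the IR in the comments is illustrative):

void copy64(volatile long long *p, volatile long long *q) {
  *p = *q;
  /* 64-bit type > 32-bit intptr: MSVC doesn't treat these as barriers,
     so this stays "load volatile" / "store volatile". */
}

void copy32(volatile int *p, volatile int *q) {
  *p = *q;
  /* Pointer-sized and naturally aligned: this becomes
     "load atomic volatile ... acquire" / "store atomic volatile ... release". */
}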

lib/CodeGen/CGExpr.cpp

@@ -1273,10 +1273,10 @@ llvm::Value *CodeGenFunction::EmitLoadOfScalar(Address Addr, bool Volatile,
   }
 
   // Atomic operations have to be done on integral types.
-  if (Ty->isAtomicType() || typeIsSuitableForInlineAtomic(Ty, Volatile)) {
-    LValue lvalue =
+  LValue AtomicLValue =
       LValue::MakeAddr(Addr, Ty, getContext(), AlignSource, TBAAInfo);
-    return EmitAtomicLoad(lvalue, Loc).getScalarVal();
+  if (Ty->isAtomicType() || LValueIsSuitableForInlineAtomic(AtomicLValue)) {
+    return EmitAtomicLoad(AtomicLValue, Loc).getScalarVal();
   }
 
   llvm::LoadInst *Load = Builder.CreateLoad(Addr, Volatile);
@@ -1384,12 +1384,11 @@ void CodeGenFunction::EmitStoreOfScalar(llvm::Value *Value, Address Addr,
   Value = EmitToMemory(Value, Ty);
 
+  LValue AtomicLValue =
+      LValue::MakeAddr(Addr, Ty, getContext(), AlignSource, TBAAInfo);
   if (Ty->isAtomicType() ||
-      (!isInit && typeIsSuitableForInlineAtomic(Ty, Volatile))) {
-    EmitAtomicStore(RValue::get(Value),
-                    LValue::MakeAddr(Addr, Ty, getContext(),
-                                     AlignSource, TBAAInfo),
-                    isInit);
+      (!isInit && LValueIsSuitableForInlineAtomic(AtomicLValue))) {
+    EmitAtomicStore(RValue::get(Value), AtomicLValue, isInit);
     return;
   }

lib/CodeGen/CodeGenFunction.h

@@ -2496,7 +2496,6 @@ public:
   void EmitAtomicInit(Expr *E, LValue lvalue);
 
   bool LValueIsSuitableForInlineAtomic(LValue Src);
-  bool typeIsSuitableForInlineAtomic(QualType Ty, bool IsVolatile) const;
 
   RValue EmitAtomicLoad(LValue LV, SourceLocation SL,
                         AggValueSlot Slot = AggValueSlot::ignored());

test/CodeGen/ms-volatile.c

@@ -7,6 +7,13 @@ struct bar {
 };
 typedef _Complex float __declspec(align(8)) baz;
 
+#pragma pack(push)
+#pragma pack(1)
+struct qux {
+  volatile int f;
+};
+#pragma pack(pop)
+
 void test1(struct foo *p, struct foo *q) {
   *p = *q;
   // CHECK-LABEL: @test1
@@ -58,7 +65,8 @@ void test8(volatile double *p, volatile double *q) {
 void test9(volatile baz *p, baz *q) {
   *p = *q;
   // CHECK-LABEL: @test9
-  // CHECK: store atomic volatile {{.*}}, {{.*}} release
+  // CHECK: store volatile {{.*}}, {{.*}}
+  // CHECK: store volatile {{.*}}, {{.*}}
 }
 void test10(volatile long long *p, volatile long long *q) {
   *p = *q;
@@ -72,3 +80,8 @@ void test11(volatile float *p, volatile float *q) {
   // CHECK: load atomic volatile {{.*}} acquire
   // CHECK: store atomic volatile {{.*}}, {{.*}} release
 }
+int test12(struct qux *p) {
+  return p->f;
+  // CHECK-LABEL: @test12
+  // CHECK: load volatile {{.*}}
+}
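
The hunks don't show the test's RUN line, so the invocation below is an assumption modeled on the usual conventions for this file (triple and flag spelling included):

// Assumed RUN line; the real one sits at the top of test/CodeGen/ms-volatile.c:
// RUN: %clang_cc1 -triple i386-pc-win32 -fms-volatile -emit-llvm -o - %s | FileCheck %s

Run through llvm-lit (or FileCheck directly), test12 should show p->f compiled to a plain volatile load rather than an atomic operation or libcall.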