Remove custom handling of array copies in lambda by-value array capture and copy constructors of classes with array members, instead using ArrayInitLoopExpr to represent the initialization loop. This exposed a bug in the static analyzer where it was unable to differentiate between zero-initialized and unknown array values, which has also been fixed here.

llvm-svn: 289618

This commit is contained in: parent 54eb192b25, commit 30e304e2a6
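For readers who have not met the new node: an ArrayInitLoopExpr says "initialize each element of this array from the corresponding element of that one". A hedged sketch of its anatomy, using only accessors that appear in this patch (the helper function itself is illustrative):

  #include "clang/AST/Expr.h"

  void describe(clang::ArrayInitLoopExpr *E) {
    // Evaluated once: an OpaqueValueExpr wrapping the source array.
    clang::OpaqueValueExpr *Source = E->getCommonExpr();
    // Evaluated conceptually once per element: the initializer for a single
    // element, which refers to the current position via ArrayInitIndexExpr
    // and to the source array via the OpaqueValueExpr above.
    clang::Expr *PerElement = E->getSubExpr();
    (void)Source;
    (void)PerElement;
  }

Sema now builds this node for lambda by-value array captures and for the implicit copy/move constructors of classes with array members, and CodeGen lowers it to a single counted loop; concrete source-level examples appear next to the Sema and CodeGen hunks below.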
@ -1928,8 +1928,7 @@ public:
|
|||
/// B(A& a) : A(a), f(3.14159) { }
|
||||
/// };
|
||||
/// \endcode
|
||||
class CXXCtorInitializer final
|
||||
: private llvm::TrailingObjects<CXXCtorInitializer, VarDecl *> {
|
||||
class CXXCtorInitializer final {
|
||||
/// \brief Either the base class name/delegating constructor type (stored as
|
||||
/// a TypeSourceInfo*), a normal field (FieldDecl), or an anonymous field
|
||||
/// (IndirectFieldDecl*) being initialized.
|
||||
|
@ -1967,14 +1966,8 @@ class CXXCtorInitializer final
|
|||
unsigned IsWritten : 1;
|
||||
|
||||
/// If IsWritten is true, then this number keeps track of the textual order
|
||||
/// of this initializer in the original sources, counting from 0; otherwise,
|
||||
/// it stores the number of array index variables stored after this object
|
||||
/// in memory.
|
||||
unsigned SourceOrderOrNumArrayIndices : 13;
|
||||
|
||||
CXXCtorInitializer(ASTContext &Context, FieldDecl *Member,
|
||||
SourceLocation MemberLoc, SourceLocation L, Expr *Init,
|
||||
SourceLocation R, VarDecl **Indices, unsigned NumIndices);
|
||||
/// of this initializer in the original sources, counting from 0.
|
||||
unsigned SourceOrder : 13;
|
||||
|
||||
public:
|
||||
/// \brief Creates a new base-class initializer.
|
||||
|
@ -2000,13 +1993,6 @@ public:
|
|||
CXXCtorInitializer(ASTContext &Context, TypeSourceInfo *TInfo,
|
||||
SourceLocation L, Expr *Init, SourceLocation R);
|
||||
|
||||
/// \brief Creates a new member initializer that optionally contains
|
||||
/// array indices used to describe an elementwise initialization.
|
||||
static CXXCtorInitializer *Create(ASTContext &Context, FieldDecl *Member,
|
||||
SourceLocation MemberLoc, SourceLocation L,
|
||||
Expr *Init, SourceLocation R,
|
||||
VarDecl **Indices, unsigned NumIndices);
|
||||
|
||||
/// \brief Determine whether this initializer is initializing a base class.
|
||||
bool isBaseInitializer() const {
|
||||
return Initializee.is<TypeSourceInfo*>() && !IsDelegating;
|
||||
|
@ -2111,7 +2097,7 @@ public:
|
|||
/// \brief Return the source position of the initializer, counting from 0.
|
||||
/// If the initializer was implicit, -1 is returned.
|
||||
int getSourceOrder() const {
|
||||
return IsWritten ? static_cast<int>(SourceOrderOrNumArrayIndices) : -1;
|
||||
return IsWritten ? static_cast<int>(SourceOrder) : -1;
|
||||
}
|
||||
|
||||
/// \brief Set the source order of this initializer.
|
||||
|
@ -2121,49 +2107,22 @@ public:
|
|||
///
|
||||
/// This assumes that the initializer was written in the source code, and
|
||||
/// ensures that isWritten() returns true.
|
||||
void setSourceOrder(int pos) {
|
||||
void setSourceOrder(int Pos) {
|
||||
assert(!IsWritten &&
|
||||
"setSourceOrder() used on implicit initializer");
|
||||
assert(SourceOrder == 0 &&
|
||||
"calling twice setSourceOrder() on the same initializer");
|
||||
assert(SourceOrderOrNumArrayIndices == 0 &&
|
||||
"setSourceOrder() used when there are implicit array indices");
|
||||
assert(pos >= 0 &&
|
||||
assert(Pos >= 0 &&
|
||||
"setSourceOrder() used to make an initializer implicit");
|
||||
IsWritten = true;
|
||||
SourceOrderOrNumArrayIndices = static_cast<unsigned>(pos);
|
||||
SourceOrder = static_cast<unsigned>(Pos);
|
||||
}
|
||||
|
||||
SourceLocation getLParenLoc() const { return LParenLoc; }
|
||||
SourceLocation getRParenLoc() const { return RParenLoc; }
|
||||
|
||||
/// \brief Determine the number of implicit array indices used while
|
||||
/// describing an array member initialization.
|
||||
unsigned getNumArrayIndices() const {
|
||||
return IsWritten ? 0 : SourceOrderOrNumArrayIndices;
|
||||
}
|
||||
|
||||
/// \brief Retrieve a particular array index variable used to
|
||||
/// describe an array member initialization.
|
||||
VarDecl *getArrayIndex(unsigned I) {
|
||||
assert(I < getNumArrayIndices() && "Out of bounds member array index");
|
||||
return getTrailingObjects<VarDecl *>()[I];
|
||||
}
|
||||
const VarDecl *getArrayIndex(unsigned I) const {
|
||||
assert(I < getNumArrayIndices() && "Out of bounds member array index");
|
||||
return getTrailingObjects<VarDecl *>()[I];
|
||||
}
|
||||
void setArrayIndex(unsigned I, VarDecl *Index) {
|
||||
assert(I < getNumArrayIndices() && "Out of bounds member array index");
|
||||
getTrailingObjects<VarDecl *>()[I] = Index;
|
||||
}
|
||||
ArrayRef<VarDecl *> getArrayIndices() {
|
||||
return llvm::makeArrayRef(getTrailingObjects<VarDecl *>(),
|
||||
getNumArrayIndices());
|
||||
}
|
||||
|
||||
/// \brief Get the initializer.
|
||||
Expr *getInit() const { return static_cast<Expr*>(Init); }
|
||||
|
||||
friend TrailingObjects;
|
||||
};
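With the array-index storage gone, CXXCtorInitializer no longer needs TrailingObjects and the 13-bit field only ever holds a source order. A hedged sketch of the usual consumer pattern for that field (the function is illustrative; the accessors are the ones declared above):

  #include "clang/AST/DeclCXX.h"

  void visitWrittenInitializers(const clang::CXXConstructorDecl *Ctor) {
    for (const clang::CXXCtorInitializer *Init : Ctor->inits()) {
      if (!Init->isWritten())
        continue;                      // implicit: getSourceOrder() returns -1
      // Position of the initializer as written in the mem-initializer list,
      // counting from 0; a -Wreorder-style check would compare this against
      // the declaration order of the initialized member or base.
      int Order = Init->getSourceOrder();
      (void)Order;
    }
  }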
|
||||
|
||||
/// Description of a constructor that was inherited from a base class.
|
||||
|
|
|
@ -1513,9 +1513,8 @@ public:
|
|||
/// C++1y introduces a new form of "capture" called an init-capture that
|
||||
/// includes an initializing expression (rather than capturing a variable),
|
||||
/// and which can never occur implicitly.
|
||||
class LambdaExpr final
|
||||
: public Expr,
|
||||
private llvm::TrailingObjects<LambdaExpr, Stmt *, unsigned, VarDecl *> {
|
||||
class LambdaExpr final : public Expr,
|
||||
private llvm::TrailingObjects<LambdaExpr, Stmt *> {
|
||||
/// \brief The source range that covers the lambda introducer ([...]).
|
||||
SourceRange IntroducerRange;
|
||||
|
||||
|
@ -1536,10 +1535,6 @@ class LambdaExpr final
|
|||
/// \brief Whether this lambda had the result type explicitly specified.
|
||||
unsigned ExplicitResultType : 1;
|
||||
|
||||
/// \brief Whether there are any array index variables stored at the end of
|
||||
/// this lambda expression.
|
||||
unsigned HasArrayIndexVars : 1;
|
||||
|
||||
/// \brief The location of the closing brace ('}') that completes
|
||||
/// the lambda.
|
||||
///
|
||||
|
@ -1550,28 +1545,19 @@ class LambdaExpr final
|
|||
/// module file just to determine the source range.
|
||||
SourceLocation ClosingBrace;
|
||||
|
||||
size_t numTrailingObjects(OverloadToken<Stmt *>) const {
|
||||
return NumCaptures + 1;
|
||||
}
|
||||
|
||||
size_t numTrailingObjects(OverloadToken<unsigned>) const {
|
||||
return HasArrayIndexVars ? NumCaptures + 1 : 0;
|
||||
}
|
||||
|
||||
/// \brief Construct a lambda expression.
|
||||
LambdaExpr(QualType T, SourceRange IntroducerRange,
|
||||
LambdaCaptureDefault CaptureDefault,
|
||||
SourceLocation CaptureDefaultLoc, ArrayRef<LambdaCapture> Captures,
|
||||
bool ExplicitParams, bool ExplicitResultType,
|
||||
ArrayRef<Expr *> CaptureInits, ArrayRef<VarDecl *> ArrayIndexVars,
|
||||
ArrayRef<unsigned> ArrayIndexStarts, SourceLocation ClosingBrace,
|
||||
ArrayRef<Expr *> CaptureInits, SourceLocation ClosingBrace,
|
||||
bool ContainsUnexpandedParameterPack);
|
||||
|
||||
/// \brief Construct an empty lambda expression.
|
||||
LambdaExpr(EmptyShell Empty, unsigned NumCaptures, bool HasArrayIndexVars)
|
||||
LambdaExpr(EmptyShell Empty, unsigned NumCaptures)
|
||||
: Expr(LambdaExprClass, Empty),
|
||||
NumCaptures(NumCaptures), CaptureDefault(LCD_None), ExplicitParams(false),
|
||||
ExplicitResultType(false), HasArrayIndexVars(true) {
|
||||
ExplicitResultType(false) {
|
||||
getStoredStmts()[NumCaptures] = nullptr;
|
||||
}
|
||||
|
||||
|
@ -1579,21 +1565,6 @@ class LambdaExpr final
|
|||
|
||||
Stmt *const *getStoredStmts() const { return getTrailingObjects<Stmt *>(); }
|
||||
|
||||
/// \brief Retrieve the mapping from captures to the first array index
|
||||
/// variable.
|
||||
unsigned *getArrayIndexStarts() { return getTrailingObjects<unsigned>(); }
|
||||
|
||||
const unsigned *getArrayIndexStarts() const {
|
||||
return getTrailingObjects<unsigned>();
|
||||
}
|
||||
|
||||
/// \brief Retrieve the complete set of array-index variables.
|
||||
VarDecl **getArrayIndexVars() { return getTrailingObjects<VarDecl *>(); }
|
||||
|
||||
VarDecl *const *getArrayIndexVars() const {
|
||||
return getTrailingObjects<VarDecl *>();
|
||||
}
|
||||
|
||||
public:
|
||||
/// \brief Construct a new lambda expression.
|
||||
static LambdaExpr *
|
||||
|
@ -1601,15 +1572,12 @@ public:
|
|||
LambdaCaptureDefault CaptureDefault, SourceLocation CaptureDefaultLoc,
|
||||
ArrayRef<LambdaCapture> Captures, bool ExplicitParams,
|
||||
bool ExplicitResultType, ArrayRef<Expr *> CaptureInits,
|
||||
ArrayRef<VarDecl *> ArrayIndexVars,
|
||||
ArrayRef<unsigned> ArrayIndexStarts, SourceLocation ClosingBrace,
|
||||
bool ContainsUnexpandedParameterPack);
|
||||
SourceLocation ClosingBrace, bool ContainsUnexpandedParameterPack);
|
||||
|
||||
/// \brief Construct a new lambda expression that will be deserialized from
|
||||
/// an external source.
|
||||
static LambdaExpr *CreateDeserialized(const ASTContext &C,
|
||||
unsigned NumCaptures,
|
||||
unsigned NumArrayIndexVars);
|
||||
unsigned NumCaptures);
|
||||
|
||||
/// \brief Determine the default capture kind for this lambda.
|
||||
LambdaCaptureDefault getCaptureDefault() const {
|
||||
|
@ -1708,14 +1676,6 @@ public:
|
|||
return capture_init_begin() + NumCaptures;
|
||||
}
|
||||
|
||||
/// \brief Retrieve the set of index variables used in the capture
|
||||
/// initializer of an array captured by copy.
|
||||
///
|
||||
/// \param Iter The iterator that points at the capture initializer for
|
||||
/// which we are extracting the corresponding index variables.
|
||||
ArrayRef<VarDecl *>
|
||||
getCaptureInitIndexVars(const_capture_init_iterator Iter) const;
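With getCaptureInitIndexVars() removed, the capture initializers themselves carry the whole story: the trailing Stmt* array holds exactly NumCaptures initializers followed by the body. A hedged sketch of walking them with the iterators that remain (the loop body is illustrative):

  #include "clang/AST/ExprCXX.h"

  void walkCaptureInits(const clang::LambdaExpr *E) {
    for (auto C = E->capture_init_begin(), CEnd = E->capture_init_end();
         C != CEnd; ++C) {
      const clang::Expr *Init = *C;
      // Init is null for a VLA-type capture; for a by-value array capture it
      // is now an ArrayInitLoopExpr rather than a subscript over index vars.
      (void)Init;
    }
  }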
|
||||
|
||||
/// \brief Retrieve the source range covering the lambda introducer,
|
||||
/// which contains the explicit capture list surrounded by square
|
||||
/// brackets ([...]).
|
||||
|
|
|
@ -886,16 +886,6 @@ bool RecursiveASTVisitor<Derived>::TraverseConstructorInitializer(
|
|||
if (Init->isWritten() || getDerived().shouldVisitImplicitCode())
|
||||
TRY_TO(TraverseStmt(Init->getInit()));
|
||||
|
||||
if (getDerived().shouldVisitImplicitCode())
|
||||
// The braces for this one-line loop are required for MSVC2013. It
|
||||
// refuses to compile
|
||||
// for (int i : int_vec)
|
||||
// do {} while(false);
|
||||
// without braces on the for loop.
|
||||
for (VarDecl *VD : Init->getArrayIndices()) {
|
||||
TRY_TO(TraverseDecl(VD));
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -2399,7 +2389,12 @@ DEF_TRAVERSE_STMT(ExtVectorElementExpr, {})
|
|||
DEF_TRAVERSE_STMT(GNUNullExpr, {})
|
||||
DEF_TRAVERSE_STMT(ImplicitValueInitExpr, {})
|
||||
DEF_TRAVERSE_STMT(NoInitExpr, {})
|
||||
DEF_TRAVERSE_STMT(ArrayInitLoopExpr, {})
|
||||
DEF_TRAVERSE_STMT(ArrayInitLoopExpr, {
|
||||
// FIXME: The source expression of the OVE should be listed as
|
||||
// a child of the ArrayInitLoopExpr.
|
||||
if (OpaqueValueExpr *OVE = S->getCommonExpr())
|
||||
TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(OVE->getSourceExpr());
|
||||
})
|
||||
DEF_TRAVERSE_STMT(ArrayInitIndexExpr, {})
|
||||
DEF_TRAVERSE_STMT(ObjCBoolLiteralExpr, {})
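Because the traversal above now also descends into the OpaqueValueExpr's source expression, a tool that cares about these implicit array copies can hook the new node directly. A minimal hedged sketch (the class is illustrative; shouldVisitImplicitCode() is needed because the initializers in question are implicit):

  #include "clang/AST/RecursiveASTVisitor.h"

  class ArrayCopyFinder : public clang::RecursiveASTVisitor<ArrayCopyFinder> {
  public:
    bool shouldVisitImplicitCode() const { return true; }

    bool VisitArrayInitLoopExpr(clang::ArrayInitLoopExpr *E) {
      // Called once per implicit array-initialization loop that is reached.
      (void)E;
      return true;
    }
  };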
|
||||
|
||||
|
|
|
@ -274,6 +274,8 @@ namespace clang {
|
|||
Expr *VisitMemberExpr(MemberExpr *E);
|
||||
Expr *VisitCallExpr(CallExpr *E);
|
||||
Expr *VisitInitListExpr(InitListExpr *E);
|
||||
Expr *VisitArrayInitLoopExpr(ArrayInitLoopExpr *E);
|
||||
Expr *VisitArrayInitIndexExpr(ArrayInitIndexExpr *E);
|
||||
Expr *VisitCXXDefaultInitExpr(CXXDefaultInitExpr *E);
|
||||
Expr *VisitCXXNamedCastExpr(CXXNamedCastExpr *E);
|
||||
|
||||
|
@ -6564,6 +6566,30 @@ Expr *ASTNodeImporter::VisitInitListExpr(InitListExpr *ILE) {
|
|||
return To;
|
||||
}
|
||||
|
||||
Expr *ASTNodeImporter::VisitArrayInitLoopExpr(ArrayInitLoopExpr *E) {
|
||||
QualType ToType = Importer.Import(E->getType());
|
||||
if (ToType.isNull())
|
||||
return nullptr;
|
||||
|
||||
Expr *ToCommon = Importer.Import(E->getCommonExpr());
|
||||
if (!ToCommon && E->getCommonExpr())
|
||||
return nullptr;
|
||||
|
||||
Expr *ToSubExpr = Importer.Import(E->getSubExpr());
|
||||
if (!ToSubExpr && E->getSubExpr())
|
||||
return nullptr;
|
||||
|
||||
return new (Importer.getToContext())
|
||||
ArrayInitLoopExpr(ToType, ToCommon, ToSubExpr);
|
||||
}
|
||||
|
||||
Expr *ASTNodeImporter::VisitArrayInitIndexExpr(ArrayInitIndexExpr *E) {
|
||||
QualType ToType = Importer.Import(E->getType());
|
||||
if (ToType.isNull())
|
||||
return nullptr;
|
||||
return new (Importer.getToContext()) ArrayInitIndexExpr(ToType);
|
||||
}
|
||||
|
||||
Expr *ASTNodeImporter::VisitCXXDefaultInitExpr(CXXDefaultInitExpr *DIE) {
|
||||
FieldDecl *ToField = llvm::dyn_cast_or_null<FieldDecl>(
|
||||
Importer.Import(DIE->getField()));
|
||||
|
@ -7054,25 +7080,6 @@ CXXCtorInitializer *ASTImporter::Import(CXXCtorInitializer *From) {
|
|||
return new (ToContext)
|
||||
CXXCtorInitializer(ToContext, ToTInfo, Import(From->getLParenLoc()),
|
||||
ToExpr, Import(From->getRParenLoc()));
|
||||
} else if (unsigned NumArrayIndices = From->getNumArrayIndices()) {
|
||||
FieldDecl *ToField =
|
||||
llvm::cast_or_null<FieldDecl>(Import(From->getMember()));
|
||||
if (!ToField && From->getMember())
|
||||
return nullptr;
|
||||
|
||||
SmallVector<VarDecl *, 4> ToAIs(NumArrayIndices);
|
||||
|
||||
for (unsigned AII = 0; AII < NumArrayIndices; ++AII) {
|
||||
VarDecl *ToArrayIndex =
|
||||
dyn_cast_or_null<VarDecl>(Import(From->getArrayIndex(AII)));
|
||||
if (!ToArrayIndex && From->getArrayIndex(AII))
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
return CXXCtorInitializer::Create(
|
||||
ToContext, ToField, Import(From->getMemberLocation()),
|
||||
Import(From->getLParenLoc()), ToExpr, Import(From->getRParenLoc()),
|
||||
ToAIs.data(), NumArrayIndices);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
|
|
|
@ -1739,7 +1739,7 @@ CXXCtorInitializer::CXXCtorInitializer(ASTContext &Context,
|
|||
SourceLocation EllipsisLoc)
|
||||
: Initializee(TInfo), MemberOrEllipsisLocation(EllipsisLoc), Init(Init),
|
||||
LParenLoc(L), RParenLoc(R), IsDelegating(false), IsVirtual(IsVirtual),
|
||||
IsWritten(false), SourceOrderOrNumArrayIndices(0)
|
||||
IsWritten(false), SourceOrder(0)
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -1750,7 +1750,7 @@ CXXCtorInitializer::CXXCtorInitializer(ASTContext &Context,
|
|||
SourceLocation R)
|
||||
: Initializee(Member), MemberOrEllipsisLocation(MemberLoc), Init(Init),
|
||||
LParenLoc(L), RParenLoc(R), IsDelegating(false), IsVirtual(false),
|
||||
IsWritten(false), SourceOrderOrNumArrayIndices(0)
|
||||
IsWritten(false), SourceOrder(0)
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -1761,7 +1761,7 @@ CXXCtorInitializer::CXXCtorInitializer(ASTContext &Context,
|
|||
SourceLocation R)
|
||||
: Initializee(Member), MemberOrEllipsisLocation(MemberLoc), Init(Init),
|
||||
LParenLoc(L), RParenLoc(R), IsDelegating(false), IsVirtual(false),
|
||||
IsWritten(false), SourceOrderOrNumArrayIndices(0)
|
||||
IsWritten(false), SourceOrder(0)
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -1771,38 +1771,10 @@ CXXCtorInitializer::CXXCtorInitializer(ASTContext &Context,
|
|||
SourceLocation R)
|
||||
: Initializee(TInfo), MemberOrEllipsisLocation(), Init(Init),
|
||||
LParenLoc(L), RParenLoc(R), IsDelegating(true), IsVirtual(false),
|
||||
IsWritten(false), SourceOrderOrNumArrayIndices(0)
|
||||
IsWritten(false), SourceOrder(0)
|
||||
{
|
||||
}
|
||||
|
||||
CXXCtorInitializer::CXXCtorInitializer(ASTContext &Context,
|
||||
FieldDecl *Member,
|
||||
SourceLocation MemberLoc,
|
||||
SourceLocation L, Expr *Init,
|
||||
SourceLocation R,
|
||||
VarDecl **Indices,
|
||||
unsigned NumIndices)
|
||||
: Initializee(Member), MemberOrEllipsisLocation(MemberLoc), Init(Init),
|
||||
LParenLoc(L), RParenLoc(R), IsDelegating(false), IsVirtual(false),
|
||||
IsWritten(false), SourceOrderOrNumArrayIndices(NumIndices)
|
||||
{
|
||||
std::uninitialized_copy(Indices, Indices + NumIndices,
|
||||
getTrailingObjects<VarDecl *>());
|
||||
}
|
||||
|
||||
CXXCtorInitializer *CXXCtorInitializer::Create(ASTContext &Context,
|
||||
FieldDecl *Member,
|
||||
SourceLocation MemberLoc,
|
||||
SourceLocation L, Expr *Init,
|
||||
SourceLocation R,
|
||||
VarDecl **Indices,
|
||||
unsigned NumIndices) {
|
||||
void *Mem = Context.Allocate(totalSizeToAlloc<VarDecl *>(NumIndices),
|
||||
alignof(CXXCtorInitializer));
|
||||
return new (Mem) CXXCtorInitializer(Context, Member, MemberLoc, L, Init, R,
|
||||
Indices, NumIndices);
|
||||
}
|
||||
|
||||
TypeLoc CXXCtorInitializer::getBaseClassLoc() const {
|
||||
if (isBaseInitializer())
|
||||
return Initializee.get<TypeSourceInfo*>()->getTypeLoc();
|
||||
|
|
|
@ -869,8 +869,6 @@ LambdaExpr::LambdaExpr(QualType T, SourceRange IntroducerRange,
|
|||
SourceLocation CaptureDefaultLoc,
|
||||
ArrayRef<LambdaCapture> Captures, bool ExplicitParams,
|
||||
bool ExplicitResultType, ArrayRef<Expr *> CaptureInits,
|
||||
ArrayRef<VarDecl *> ArrayIndexVars,
|
||||
ArrayRef<unsigned> ArrayIndexStarts,
|
||||
SourceLocation ClosingBrace,
|
||||
bool ContainsUnexpandedParameterPack)
|
||||
: Expr(LambdaExprClass, T, VK_RValue, OK_Ordinary, T->isDependentType(),
|
||||
|
@ -907,17 +905,6 @@ LambdaExpr::LambdaExpr(QualType T, SourceRange IntroducerRange,
|
|||
|
||||
// Copy the body of the lambda.
|
||||
*Stored++ = getCallOperator()->getBody();
|
||||
|
||||
// Copy the array index variables, if any.
|
||||
HasArrayIndexVars = !ArrayIndexVars.empty();
|
||||
if (HasArrayIndexVars) {
|
||||
assert(ArrayIndexStarts.size() == NumCaptures);
|
||||
memcpy(getArrayIndexVars(), ArrayIndexVars.data(),
|
||||
sizeof(VarDecl *) * ArrayIndexVars.size());
|
||||
memcpy(getArrayIndexStarts(), ArrayIndexStarts.data(),
|
||||
sizeof(unsigned) * Captures.size());
|
||||
getArrayIndexStarts()[Captures.size()] = ArrayIndexVars.size();
|
||||
}
|
||||
}
|
||||
|
||||
LambdaExpr *LambdaExpr::Create(
|
||||
|
@ -925,31 +912,24 @@ LambdaExpr *LambdaExpr::Create(
|
|||
SourceRange IntroducerRange, LambdaCaptureDefault CaptureDefault,
|
||||
SourceLocation CaptureDefaultLoc, ArrayRef<LambdaCapture> Captures,
|
||||
bool ExplicitParams, bool ExplicitResultType, ArrayRef<Expr *> CaptureInits,
|
||||
ArrayRef<VarDecl *> ArrayIndexVars, ArrayRef<unsigned> ArrayIndexStarts,
|
||||
SourceLocation ClosingBrace, bool ContainsUnexpandedParameterPack) {
|
||||
// Determine the type of the expression (i.e., the type of the
|
||||
// function object we're creating).
|
||||
QualType T = Context.getTypeDeclType(Class);
|
||||
|
||||
unsigned Size = totalSizeToAlloc<Stmt *, unsigned, VarDecl *>(
|
||||
Captures.size() + 1, ArrayIndexVars.empty() ? 0 : Captures.size() + 1,
|
||||
ArrayIndexVars.size());
|
||||
unsigned Size = totalSizeToAlloc<Stmt *>(Captures.size() + 1);
|
||||
void *Mem = Context.Allocate(Size);
|
||||
return new (Mem) LambdaExpr(T, IntroducerRange,
|
||||
CaptureDefault, CaptureDefaultLoc, Captures,
|
||||
ExplicitParams, ExplicitResultType,
|
||||
CaptureInits, ArrayIndexVars, ArrayIndexStarts,
|
||||
ClosingBrace, ContainsUnexpandedParameterPack);
|
||||
return new (Mem)
|
||||
LambdaExpr(T, IntroducerRange, CaptureDefault, CaptureDefaultLoc,
|
||||
Captures, ExplicitParams, ExplicitResultType, CaptureInits,
|
||||
ClosingBrace, ContainsUnexpandedParameterPack);
|
||||
}
|
||||
|
||||
LambdaExpr *LambdaExpr::CreateDeserialized(const ASTContext &C,
|
||||
unsigned NumCaptures,
|
||||
unsigned NumArrayIndexVars) {
|
||||
unsigned Size = totalSizeToAlloc<Stmt *, unsigned, VarDecl *>(
|
||||
NumCaptures + 1, NumArrayIndexVars ? NumCaptures + 1 : 0,
|
||||
NumArrayIndexVars);
|
||||
unsigned NumCaptures) {
|
||||
unsigned Size = totalSizeToAlloc<Stmt *>(NumCaptures + 1);
|
||||
void *Mem = C.Allocate(Size);
|
||||
return new (Mem) LambdaExpr(EmptyShell(), NumCaptures, NumArrayIndexVars > 0);
|
||||
return new (Mem) LambdaExpr(EmptyShell(), NumCaptures);
|
||||
}
|
||||
|
||||
bool LambdaExpr::isInitCapture(const LambdaCapture *C) const {
|
||||
|
@ -995,19 +975,6 @@ LambdaExpr::capture_range LambdaExpr::implicit_captures() const {
|
|||
return capture_range(implicit_capture_begin(), implicit_capture_end());
|
||||
}
|
||||
|
||||
ArrayRef<VarDecl *>
|
||||
LambdaExpr::getCaptureInitIndexVars(const_capture_init_iterator Iter) const {
|
||||
assert(HasArrayIndexVars && "No array index-var data?");
|
||||
|
||||
unsigned Index = Iter - capture_init_begin();
|
||||
assert(Index < getLambdaClass()->getLambdaData().NumCaptures &&
|
||||
"Capture index out-of-range");
|
||||
VarDecl *const *IndexVars = getArrayIndexVars();
|
||||
const unsigned *IndexStarts = getArrayIndexStarts();
|
||||
return llvm::makeArrayRef(IndexVars + IndexStarts[Index],
|
||||
IndexVars + IndexStarts[Index + 1]);
|
||||
}
|
||||
|
||||
CXXRecordDecl *LambdaExpr::getLambdaClass() const {
|
||||
return getType()->getAsCXXRecordDecl();
|
||||
}
|
||||
|
|
|
@ -562,140 +562,6 @@ static void EmitBaseInitializer(CodeGenFunction &CGF,
|
|||
isBaseVirtual);
|
||||
}
|
||||
|
||||
/// Initialize a member of aggregate type using the given expression
|
||||
/// as an initializer.
|
||||
///
|
||||
/// The member may be an array. If so:
|
||||
/// - the destination l-value will be a pointer of the *base* element type,
|
||||
/// - ArrayIndexVar will be a pointer to a variable containing the current
|
||||
/// index within the destination array, and
|
||||
/// - ArrayIndexes will be an array of index variables, one for each level
|
||||
/// of array nesting, which will need to be updated as appropriate for the
|
||||
/// array structure.
|
||||
///
|
||||
/// On an array, this function will invoke itself recursively. Each time,
|
||||
/// it drills into one nesting level of the member type and sets up a
|
||||
/// loop updating the appropriate array index variable.
|
||||
static void EmitAggMemberInitializer(CodeGenFunction &CGF,
|
||||
LValue LHS,
|
||||
Expr *Init,
|
||||
Address ArrayIndexVar,
|
||||
QualType T,
|
||||
ArrayRef<VarDecl *> ArrayIndexes,
|
||||
unsigned Index) {
|
||||
assert(ArrayIndexVar.isValid() == (ArrayIndexes.size() != 0));
|
||||
|
||||
if (Index == ArrayIndexes.size()) {
|
||||
LValue LV = LHS;
|
||||
|
||||
Optional<CodeGenFunction::RunCleanupsScope> Scope;
|
||||
|
||||
if (ArrayIndexVar.isValid()) {
|
||||
// When we're processing an array, the temporaries from each
|
||||
// element's construction are destroyed immediately.
|
||||
Scope.emplace(CGF);
|
||||
|
||||
// If we have an array index variable, load it and use it as an offset.
|
||||
// Then, increment the value.
|
||||
llvm::Value *Dest = LHS.getPointer();
|
||||
llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
|
||||
Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
|
||||
llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
|
||||
Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
|
||||
CGF.Builder.CreateStore(Next, ArrayIndexVar);
|
||||
|
||||
// Update the LValue.
|
||||
CharUnits EltSize = CGF.getContext().getTypeSizeInChars(T);
|
||||
CharUnits Align = LV.getAlignment().alignmentOfArrayElement(EltSize);
|
||||
LV.setAddress(Address(Dest, Align));
|
||||
|
||||
// Enter a partial-array EH cleanup to destroy previous members
|
||||
// of the array if this initialization throws.
|
||||
if (CGF.CGM.getLangOpts().Exceptions) {
|
||||
if (auto DtorKind = T.isDestructedType()) {
|
||||
if (CGF.needsEHCleanup(DtorKind)) {
|
||||
CGF.pushRegularPartialArrayCleanup(LHS.getPointer(),
|
||||
LV.getPointer(), T,
|
||||
LV.getAlignment(),
|
||||
CGF.getDestroyer(DtorKind));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
switch (CGF.getEvaluationKind(T)) {
|
||||
case TEK_Scalar:
|
||||
CGF.EmitScalarInit(Init, /*decl*/ nullptr, LV, false);
|
||||
break;
|
||||
case TEK_Complex:
|
||||
CGF.EmitComplexExprIntoLValue(Init, LV, /*isInit*/ true);
|
||||
break;
|
||||
case TEK_Aggregate: {
|
||||
AggValueSlot Slot =
|
||||
AggValueSlot::forLValue(LV,
|
||||
AggValueSlot::IsDestructed,
|
||||
AggValueSlot::DoesNotNeedGCBarriers,
|
||||
AggValueSlot::IsNotAliased);
|
||||
|
||||
CGF.EmitAggExpr(Init, Slot);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
|
||||
assert(Array && "Array initialization without the array type?");
|
||||
Address IndexVar = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]);
|
||||
|
||||
// Initialize this index variable to zero.
|
||||
llvm::Value* Zero
|
||||
= llvm::Constant::getNullValue(IndexVar.getElementType());
|
||||
CGF.Builder.CreateStore(Zero, IndexVar);
|
||||
|
||||
// Start the loop with a block that tests the condition.
|
||||
llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
|
||||
llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");
|
||||
|
||||
CGF.EmitBlock(CondBlock);
|
||||
|
||||
llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
|
||||
// Generate: if (loop-index < number-of-elements) fall to the loop body,
|
||||
// otherwise, go to the block after the for-loop.
|
||||
uint64_t NumElements = Array->getSize().getZExtValue();
|
||||
llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
|
||||
llvm::Value *NumElementsPtr =
|
||||
llvm::ConstantInt::get(Counter->getType(), NumElements);
|
||||
llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
|
||||
"isless");
|
||||
|
||||
// If the condition is true, execute the body.
|
||||
CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);
|
||||
|
||||
CGF.EmitBlock(ForBody);
|
||||
llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");
|
||||
|
||||
// Inside the loop body recurse to emit the inner loop or, eventually, the
|
||||
// constructor call.
|
||||
EmitAggMemberInitializer(CGF, LHS, Init, ArrayIndexVar,
|
||||
Array->getElementType(), ArrayIndexes, Index + 1);
|
||||
|
||||
CGF.EmitBlock(ContinueBlock);
|
||||
|
||||
// Emit the increment of the loop counter.
|
||||
llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
|
||||
Counter = CGF.Builder.CreateLoad(IndexVar);
|
||||
NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
|
||||
CGF.Builder.CreateStore(NextVal, IndexVar);
|
||||
|
||||
// Finally, branch back up to the condition for the next iteration.
|
||||
CGF.EmitBranch(CondBlock);
|
||||
|
||||
// Emit the fall-through block.
|
||||
CGF.EmitBlock(AfterFor, true);
|
||||
}
|
||||
|
||||
static bool isMemcpyEquivalentSpecialMember(const CXXMethodDecl *D) {
|
||||
auto *CD = dyn_cast<CXXConstructorDecl>(D);
|
||||
if (!(CD && CD->isCopyOrMoveConstructor()) &&
|
||||
|
@ -779,14 +645,11 @@ static void EmitMemberInitializer(CodeGenFunction &CGF,
|
|||
}
|
||||
}
|
||||
|
||||
ArrayRef<VarDecl *> ArrayIndexes;
|
||||
if (MemberInit->getNumArrayIndices())
|
||||
ArrayIndexes = MemberInit->getArrayIndices();
|
||||
CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes);
|
||||
CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit());
|
||||
}
|
||||
|
||||
void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS,
|
||||
Expr *Init, ArrayRef<VarDecl *> ArrayIndexes) {
|
||||
Expr *Init) {
|
||||
QualType FieldType = Field->getType();
|
||||
switch (getEvaluationKind(FieldType)) {
|
||||
case TEK_Scalar:
|
||||
|
@ -801,30 +664,13 @@ void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS,
|
|||
EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true);
|
||||
break;
|
||||
case TEK_Aggregate: {
|
||||
Address ArrayIndexVar = Address::invalid();
|
||||
if (ArrayIndexes.size()) {
|
||||
// The LHS is a pointer to the first object we'll be constructing, as
|
||||
// a flat array.
|
||||
QualType BaseElementTy = getContext().getBaseElementType(FieldType);
|
||||
llvm::Type *BasePtr = ConvertType(BaseElementTy);
|
||||
BasePtr = llvm::PointerType::getUnqual(BasePtr);
|
||||
Address BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(), BasePtr);
|
||||
LHS = MakeAddrLValue(BaseAddrPtr, BaseElementTy);
|
||||
|
||||
// Create an array index that will be used to walk over all of the
|
||||
// objects we're constructing.
|
||||
ArrayIndexVar = CreateMemTemp(getContext().getSizeType(), "object.index");
|
||||
llvm::Value *Zero =
|
||||
llvm::Constant::getNullValue(ArrayIndexVar.getElementType());
|
||||
Builder.CreateStore(Zero, ArrayIndexVar);
|
||||
|
||||
// Emit the block variables for the array indices, if any.
|
||||
for (unsigned I = 0, N = ArrayIndexes.size(); I != N; ++I)
|
||||
EmitAutoVarDecl(*ArrayIndexes[I]);
|
||||
}
|
||||
|
||||
EmitAggMemberInitializer(*this, LHS, Init, ArrayIndexVar, FieldType,
|
||||
ArrayIndexes, 0);
|
||||
AggValueSlot Slot =
|
||||
AggValueSlot::forLValue(LHS,
|
||||
AggValueSlot::IsDestructed,
|
||||
AggValueSlot::DoesNotNeedGCBarriers,
|
||||
AggValueSlot::IsNotAliased);
|
||||
EmitAggExpr(Init, Slot);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1328,27 +1328,12 @@ void AggExprEmitter::VisitArrayInitLoopExpr(const ArrayInitLoopExpr *E) {
|
|||
llvm::Value *begin = Builder.CreateInBoundsGEP(destPtr.getPointer(), indices,
|
||||
"arrayinit.begin");
|
||||
|
||||
QualType elementType = E->getSubExpr()->getType();
|
||||
QualType elementType =
|
||||
CGF.getContext().getAsArrayType(E->getType())->getElementType();
|
||||
CharUnits elementSize = CGF.getContext().getTypeSizeInChars(elementType);
|
||||
CharUnits elementAlign =
|
||||
destPtr.getAlignment().alignmentOfArrayElement(elementSize);
|
||||
|
||||
// Prepare for a cleanup.
|
||||
QualType::DestructionKind dtorKind = elementType.isDestructedType();
|
||||
Address endOfInit = Address::invalid();
|
||||
EHScopeStack::stable_iterator cleanup;
|
||||
llvm::Instruction *cleanupDominator = nullptr;
|
||||
if (CGF.needsEHCleanup(dtorKind)) {
|
||||
endOfInit = CGF.CreateTempAlloca(begin->getType(), CGF.getPointerAlign(),
|
||||
"arrayinit.endOfInit");
|
||||
CGF.pushIrregularPartialArrayCleanup(begin, endOfInit, elementType,
|
||||
elementAlign,
|
||||
CGF.getDestroyer(dtorKind));
|
||||
cleanup = CGF.EHStack.stable_begin();
|
||||
} else {
|
||||
dtorKind = QualType::DK_none;
|
||||
}
|
||||
|
||||
llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
|
||||
llvm::BasicBlock *bodyBB = CGF.createBasicBlock("arrayinit.body");
|
||||
|
||||
|
@ -1359,11 +1344,22 @@ void AggExprEmitter::VisitArrayInitLoopExpr(const ArrayInitLoopExpr *E) {
|
|||
index->addIncoming(zero, entryBB);
|
||||
llvm::Value *element = Builder.CreateInBoundsGEP(begin, index);
|
||||
|
||||
// Tell the EH cleanup that we finished with the last element.
|
||||
if (endOfInit.isValid()) Builder.CreateStore(element, endOfInit);
|
||||
// Prepare for a cleanup.
|
||||
QualType::DestructionKind dtorKind = elementType.isDestructedType();
|
||||
EHScopeStack::stable_iterator cleanup;
|
||||
if (CGF.needsEHCleanup(dtorKind)) {
|
||||
CGF.pushRegularPartialArrayCleanup(
|
||||
begin, element, elementType, elementAlign, CGF.getDestroyer(dtorKind));
|
||||
cleanup = CGF.EHStack.stable_begin();
|
||||
} else {
|
||||
dtorKind = QualType::DK_none;
|
||||
}
|
||||
|
||||
// Emit the actual filler expression.
|
||||
{
|
||||
// Temporaries created in an array initialization loop are destroyed
|
||||
// at the end of each iteration.
|
||||
CodeGenFunction::RunCleanupsScope CleanupsScope(CGF);
|
||||
CodeGenFunction::ArrayInitLoopExprScope Scope(CGF, index);
|
||||
LValue elementLV =
|
||||
CGF.MakeAddrLValue(Address(element, elementAlign), elementType);
|
||||
|
@ -1385,7 +1381,8 @@ void AggExprEmitter::VisitArrayInitLoopExpr(const ArrayInitLoopExpr *E) {
|
|||
CGF.EmitBlock(endBB);
|
||||
|
||||
// Leave the partial-array cleanup if we entered one.
|
||||
if (dtorKind) CGF.DeactivateCleanupBlock(cleanup, cleanupDominator);
|
||||
if (dtorKind)
|
||||
CGF.DeactivateCleanupBlock(cleanup, index);
|
||||
}
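The cleanup is now pushed per element against the current element pointer and deactivated with the loop's index phi as the dominating point, instead of being threaded through a separate end-of-init slot. A hedged example of source whose emitted copy loop needs that partial-array cleanup (an illustration, not a test from this patch):

  struct Logger {
    Logger() {}
    Logger(const Logger &other) { (void)other; }  // non-trivial copy
    ~Logger() {}
  };

  void retain() {
    Logger logs[3];
    // Capturing 'logs' by value is lowered through VisitArrayInitLoopExpr to
    // one counted loop; should an element's copy throw, the partial-array
    // cleanup destroys the elements already copied into the closure.
    auto keep = [logs] {};
    (void)keep;
  }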
|
||||
|
||||
void AggExprEmitter::VisitDesignatedInitUpdateExpr(DesignatedInitUpdateExpr *E) {
|
||||
|
|
|
@ -2140,10 +2140,7 @@ void CodeGenFunction::EmitLambdaExpr(const LambdaExpr *E, AggValueSlot Slot) {
|
|||
auto VAT = CurField->getCapturedVLAType();
|
||||
EmitStoreThroughLValue(RValue::get(VLASizeMap[VAT->getSizeExpr()]), LV);
|
||||
} else {
|
||||
ArrayRef<VarDecl *> ArrayIndexes;
|
||||
if (CurField->getType()->isArrayType())
|
||||
ArrayIndexes = E->getCaptureInitIndexVars(i);
|
||||
EmitInitializerForField(*CurField, LV, *i, ArrayIndexes);
|
||||
EmitInitializerForField(*CurField, LV, *i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2206,7 +2206,7 @@ LValue CodeGenFunction::InitCapturedStruct(const CapturedStmt &S) {
|
|||
auto VAT = CurField->getCapturedVLAType();
|
||||
EmitStoreThroughLValue(RValue::get(VLASizeMap[VAT->getSizeExpr()]), LV);
|
||||
} else {
|
||||
EmitInitializerForField(*CurField, LV, *I, None);
|
||||
EmitInitializerForField(*CurField, LV, *I);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1601,8 +1601,7 @@ public:
|
|||
void EmitCtorPrologue(const CXXConstructorDecl *CD, CXXCtorType Type,
|
||||
FunctionArgList &Args);
|
||||
|
||||
void EmitInitializerForField(FieldDecl *Field, LValue LHS, Expr *Init,
|
||||
ArrayRef<VarDecl *> ArrayIndexes);
|
||||
void EmitInitializerForField(FieldDecl *Field, LValue LHS, Expr *Init);
|
||||
|
||||
/// Struct with all information about dynamic [sub]class needed to set vptr.
|
||||
struct VPtr {
|
||||
|
|
|
@ -4246,98 +4246,30 @@ BuildImplicitMemberInitializer(Sema &SemaRef, CXXConstructorDecl *Constructor,
|
|||
CtorArg = CastForMoving(SemaRef, CtorArg.get());
|
||||
}
|
||||
|
||||
// When the field we are copying is an array, create index variables for
|
||||
// each dimension of the array. We use these index variables to subscript
|
||||
// the source array, and other clients (e.g., CodeGen) will perform the
|
||||
// necessary iteration with these index variables.
|
||||
SmallVector<VarDecl *, 4> IndexVariables;
|
||||
QualType BaseType = Field->getType();
|
||||
QualType SizeType = SemaRef.Context.getSizeType();
|
||||
bool InitializingArray = false;
|
||||
while (const ConstantArrayType *Array
|
||||
= SemaRef.Context.getAsConstantArrayType(BaseType)) {
|
||||
InitializingArray = true;
|
||||
// Create the iteration variable for this array index.
|
||||
IdentifierInfo *IterationVarName = nullptr;
|
||||
{
|
||||
SmallString<8> Str;
|
||||
llvm::raw_svector_ostream OS(Str);
|
||||
OS << "__i" << IndexVariables.size();
|
||||
IterationVarName = &SemaRef.Context.Idents.get(OS.str());
|
||||
}
|
||||
VarDecl *IterationVar
|
||||
= VarDecl::Create(SemaRef.Context, SemaRef.CurContext, Loc, Loc,
|
||||
IterationVarName, SizeType,
|
||||
SemaRef.Context.getTrivialTypeSourceInfo(SizeType, Loc),
|
||||
SC_None);
|
||||
IndexVariables.push_back(IterationVar);
|
||||
|
||||
// Create a reference to the iteration variable.
|
||||
ExprResult IterationVarRef
|
||||
= SemaRef.BuildDeclRefExpr(IterationVar, SizeType, VK_LValue, Loc);
|
||||
assert(!IterationVarRef.isInvalid() &&
|
||||
"Reference to invented variable cannot fail!");
|
||||
IterationVarRef = SemaRef.DefaultLvalueConversion(IterationVarRef.get());
|
||||
assert(!IterationVarRef.isInvalid() &&
|
||||
"Conversion of invented variable cannot fail!");
|
||||
InitializedEntity Entity =
|
||||
Indirect ? InitializedEntity::InitializeMember(Indirect, nullptr,
|
||||
/*Implicit*/ true)
|
||||
: InitializedEntity::InitializeMember(Field, nullptr,
|
||||
/*Implicit*/ true);
|
||||
|
||||
// Subscript the array with this iteration variable.
|
||||
CtorArg = SemaRef.CreateBuiltinArraySubscriptExpr(CtorArg.get(), Loc,
|
||||
IterationVarRef.get(),
|
||||
Loc);
|
||||
if (CtorArg.isInvalid())
|
||||
return true;
|
||||
|
||||
BaseType = Array->getElementType();
|
||||
}
|
||||
|
||||
// The array subscript expression is an lvalue, which is wrong for moving.
|
||||
if (Moving && InitializingArray)
|
||||
CtorArg = CastForMoving(SemaRef, CtorArg.get());
|
||||
|
||||
// Construct the entity that we will be initializing. For an array, this
|
||||
// will be first element in the array, which may require several levels
|
||||
// of array-subscript entities.
|
||||
SmallVector<InitializedEntity, 4> Entities;
|
||||
Entities.reserve(1 + IndexVariables.size());
|
||||
if (Indirect)
|
||||
Entities.push_back(InitializedEntity::InitializeMember(Indirect));
|
||||
else
|
||||
Entities.push_back(InitializedEntity::InitializeMember(Field));
|
||||
for (unsigned I = 0, N = IndexVariables.size(); I != N; ++I)
|
||||
Entities.push_back(InitializedEntity::InitializeElement(SemaRef.Context,
|
||||
0,
|
||||
Entities.back()));
|
||||
|
||||
// Direct-initialize to use the copy constructor.
|
||||
InitializationKind InitKind =
|
||||
InitializationKind::CreateDirect(Loc, SourceLocation(), SourceLocation());
|
||||
|
||||
Expr *CtorArgE = CtorArg.getAs<Expr>();
|
||||
InitializationSequence InitSeq(SemaRef, Entities.back(), InitKind,
|
||||
CtorArgE);
|
||||
|
||||
ExprResult MemberInit
|
||||
= InitSeq.Perform(SemaRef, Entities.back(), InitKind,
|
||||
MultiExprArg(&CtorArgE, 1));
|
||||
InitializationSequence InitSeq(SemaRef, Entity, InitKind, CtorArgE);
|
||||
ExprResult MemberInit =
|
||||
InitSeq.Perform(SemaRef, Entity, InitKind, MultiExprArg(&CtorArgE, 1));
|
||||
MemberInit = SemaRef.MaybeCreateExprWithCleanups(MemberInit);
|
||||
if (MemberInit.isInvalid())
|
||||
return true;
|
||||
|
||||
if (Indirect) {
|
||||
assert(IndexVariables.size() == 0 &&
|
||||
"Indirect field improperly initialized");
|
||||
CXXMemberInit
|
||||
= new (SemaRef.Context) CXXCtorInitializer(SemaRef.Context, Indirect,
|
||||
Loc, Loc,
|
||||
MemberInit.getAs<Expr>(),
|
||||
Loc);
|
||||
} else
|
||||
CXXMemberInit = CXXCtorInitializer::Create(SemaRef.Context, Field, Loc,
|
||||
Loc, MemberInit.getAs<Expr>(),
|
||||
Loc,
|
||||
IndexVariables.data(),
|
||||
IndexVariables.size());
|
||||
if (Indirect)
|
||||
CXXMemberInit = new (SemaRef.Context) CXXCtorInitializer(
|
||||
SemaRef.Context, Indirect, Loc, Loc, MemberInit.getAs<Expr>(), Loc);
|
||||
else
|
||||
CXXMemberInit = new (SemaRef.Context) CXXCtorInitializer(
|
||||
SemaRef.Context, Field, Loc, Loc, MemberInit.getAs<Expr>(), Loc);
|
||||
return false;
|
||||
}
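For a multidimensional array member, the old code invented one index variable per dimension (__i0, __i1, ...); with the code above, a single InitializationSequence over the whole member yields nested array-initialization loops instead, one per dimension (the pr28595 CodeGen test below exercises the same nesting for a lambda capturing a two-dimensional array). A small hedged example of a class whose implicit copy constructor takes this path:

  struct Pixel {
    Pixel() {}
    Pixel(const Pixel &other) { (void)other; }  // makes the copy non-trivial
    float channels[4];
  };

  struct Image {
    Pixel rows[2][3];
    // The implicit Image(const Image &) initializes 'rows' with nested
    // ArrayInitLoopExprs built here, rather than with __i0/__i1 variables.
  };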
|
||||
|
||||
|
@ -4348,9 +4280,11 @@ BuildImplicitMemberInitializer(Sema &SemaRef, CXXConstructorDecl *Constructor,
|
|||
SemaRef.Context.getBaseElementType(Field->getType());
|
||||
|
||||
if (FieldBaseElementType->isRecordType()) {
|
||||
InitializedEntity InitEntity
|
||||
= Indirect? InitializedEntity::InitializeMember(Indirect)
|
||||
: InitializedEntity::InitializeMember(Field);
|
||||
InitializedEntity InitEntity =
|
||||
Indirect ? InitializedEntity::InitializeMember(Indirect, nullptr,
|
||||
/*Implicit*/ true)
|
||||
: InitializedEntity::InitializeMember(Field, nullptr,
|
||||
/*Implicit*/ true);
|
||||
InitializationKind InitKind =
|
||||
InitializationKind::CreateDefault(Loc);
|
||||
|
||||
|
|
|
@ -5227,8 +5227,9 @@ void InitializationSequence::InitializeFrom(Sema &S,
|
|||
InitializedEntity::InitializeElement(S.Context, 0, Entity);
|
||||
QualType InitEltT =
|
||||
Context.getAsArrayType(Initializer->getType())->getElementType();
|
||||
OpaqueValueExpr OVE(SourceLocation(), InitEltT,
|
||||
Initializer->getValueKind());
|
||||
OpaqueValueExpr OVE(Initializer->getExprLoc(), InitEltT,
|
||||
Initializer->getValueKind(),
|
||||
Initializer->getObjectKind());
|
||||
Expr *OVEAsExpr = &OVE;
|
||||
InitializeFrom(S, Element, Kind, OVEAsExpr, TopLevelOfInitList,
|
||||
TreatUnavailableAsInvalid);
|
||||
|
|
|
@ -1384,10 +1384,7 @@ static void addBlockPointerConversion(Sema &S,
|
|||
}
|
||||
|
||||
static ExprResult performLambdaVarCaptureInitialization(
|
||||
Sema &S, LambdaScopeInfo::Capture &Capture,
|
||||
FieldDecl *Field,
|
||||
SmallVectorImpl<VarDecl *> &ArrayIndexVars,
|
||||
SmallVectorImpl<unsigned> &ArrayIndexStarts) {
|
||||
Sema &S, LambdaScopeInfo::Capture &Capture, FieldDecl *Field) {
|
||||
assert(Capture.isVariableCapture() && "not a variable capture");
|
||||
|
||||
auto *Var = Capture.getVariable();
|
||||
|
@ -1411,69 +1408,11 @@ static ExprResult performLambdaVarCaptureInitialization(
|
|||
return ExprError();
|
||||
Expr *Ref = RefResult.get();
|
||||
|
||||
QualType FieldType = Field->getType();
|
||||
|
||||
// When the variable has array type, create index variables for each
|
||||
// dimension of the array. We use these index variables to subscript
|
||||
// the source array, and other clients (e.g., CodeGen) will perform
|
||||
// the necessary iteration with these index variables.
|
||||
//
|
||||
// FIXME: This is dumb. Add a proper AST representation for array
|
||||
// copy-construction and use it here.
|
||||
SmallVector<VarDecl *, 4> IndexVariables;
|
||||
QualType BaseType = FieldType;
|
||||
QualType SizeType = S.Context.getSizeType();
|
||||
ArrayIndexStarts.push_back(ArrayIndexVars.size());
|
||||
while (const ConstantArrayType *Array
|
||||
= S.Context.getAsConstantArrayType(BaseType)) {
|
||||
// Create the iteration variable for this array index.
|
||||
IdentifierInfo *IterationVarName = nullptr;
|
||||
{
|
||||
SmallString<8> Str;
|
||||
llvm::raw_svector_ostream OS(Str);
|
||||
OS << "__i" << IndexVariables.size();
|
||||
IterationVarName = &S.Context.Idents.get(OS.str());
|
||||
}
|
||||
VarDecl *IterationVar = VarDecl::Create(
|
||||
S.Context, S.CurContext, Loc, Loc, IterationVarName, SizeType,
|
||||
S.Context.getTrivialTypeSourceInfo(SizeType, Loc), SC_None);
|
||||
IterationVar->setImplicit();
|
||||
IndexVariables.push_back(IterationVar);
|
||||
ArrayIndexVars.push_back(IterationVar);
|
||||
|
||||
// Create a reference to the iteration variable.
|
||||
ExprResult IterationVarRef =
|
||||
S.BuildDeclRefExpr(IterationVar, SizeType, VK_LValue, Loc);
|
||||
assert(!IterationVarRef.isInvalid() &&
|
||||
"Reference to invented variable cannot fail!");
|
||||
IterationVarRef = S.DefaultLvalueConversion(IterationVarRef.get());
|
||||
assert(!IterationVarRef.isInvalid() &&
|
||||
"Conversion of invented variable cannot fail!");
|
||||
|
||||
// Subscript the array with this iteration variable.
|
||||
ExprResult Subscript =
|
||||
S.CreateBuiltinArraySubscriptExpr(Ref, Loc, IterationVarRef.get(), Loc);
|
||||
if (Subscript.isInvalid())
|
||||
return ExprError();
|
||||
|
||||
Ref = Subscript.get();
|
||||
BaseType = Array->getElementType();
|
||||
}
|
||||
|
||||
// Construct the entity that we will be initializing. For an array, this
|
||||
// will be first element in the array, which may require several levels
|
||||
// of array-subscript entities.
|
||||
SmallVector<InitializedEntity, 4> Entities;
|
||||
Entities.reserve(1 + IndexVariables.size());
|
||||
Entities.push_back(InitializedEntity::InitializeLambdaCapture(
|
||||
Var->getIdentifier(), FieldType, Loc));
|
||||
for (unsigned I = 0, N = IndexVariables.size(); I != N; ++I)
|
||||
Entities.push_back(
|
||||
InitializedEntity::InitializeElement(S.Context, 0, Entities.back()));
|
||||
|
||||
auto Entity = InitializedEntity::InitializeLambdaCapture(
|
||||
Var->getIdentifier(), Field->getType(), Loc);
|
||||
InitializationKind InitKind = InitializationKind::CreateDirect(Loc, Loc, Loc);
|
||||
InitializationSequence Init(S, Entities.back(), InitKind, Ref);
|
||||
return Init.Perform(S, Entities.back(), InitKind, Ref);
|
||||
InitializationSequence Init(S, Entity, InitKind, Ref);
|
||||
return Init.Perform(S, Entity, InitKind, Ref);
|
||||
}
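Array captures no longer get special treatment here: the capture field is initialized as a whole, and the InitializationSequence produces an ArrayInitLoopExpr whenever the field has array type. A hedged source-level example of a capture this function now handles uniformly (illustrative):

  void snapshot() {
    int readings[16] = {};
    // The capture initializer built for 'readings' is an ArrayInitLoopExpr
    // copying the local array into the closure's int[16] field.
    auto latest = [readings] { return readings[15]; };
    (void)latest;
  }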
|
||||
|
||||
ExprResult Sema::ActOnLambdaExpr(SourceLocation StartLoc, Stmt *Body,
|
||||
|
@ -1514,8 +1453,6 @@ ExprResult Sema::BuildLambdaExpr(SourceLocation StartLoc, SourceLocation EndLoc,
|
|||
bool ExplicitResultType;
|
||||
CleanupInfo LambdaCleanup;
|
||||
bool ContainsUnexpandedParameterPack;
|
||||
SmallVector<VarDecl *, 4> ArrayIndexVars;
|
||||
SmallVector<unsigned, 4> ArrayIndexStarts;
|
||||
{
|
||||
CallOperator = LSI->CallOperator;
|
||||
Class = LSI->Lambda;
|
||||
|
@ -1549,14 +1486,12 @@ ExprResult Sema::BuildLambdaExpr(SourceLocation StartLoc, SourceLocation EndLoc,
|
|||
LambdaCapture(From.getLocation(), IsImplicit,
|
||||
From.isCopyCapture() ? LCK_StarThis : LCK_This));
|
||||
CaptureInits.push_back(From.getInitExpr());
|
||||
ArrayIndexStarts.push_back(ArrayIndexVars.size());
|
||||
continue;
|
||||
}
|
||||
if (From.isVLATypeCapture()) {
|
||||
Captures.push_back(
|
||||
LambdaCapture(From.getLocation(), IsImplicit, LCK_VLAType));
|
||||
CaptureInits.push_back(nullptr);
|
||||
ArrayIndexStarts.push_back(ArrayIndexVars.size());
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -1566,13 +1501,11 @@ ExprResult Sema::BuildLambdaExpr(SourceLocation StartLoc, SourceLocation EndLoc,
|
|||
Var, From.getEllipsisLoc()));
|
||||
Expr *Init = From.getInitExpr();
|
||||
if (!Init) {
|
||||
auto InitResult = performLambdaVarCaptureInitialization(
|
||||
*this, From, *CurField, ArrayIndexVars, ArrayIndexStarts);
|
||||
auto InitResult =
|
||||
performLambdaVarCaptureInitialization(*this, From, *CurField);
|
||||
if (InitResult.isInvalid())
|
||||
return ExprError();
|
||||
Init = InitResult.get();
|
||||
} else {
|
||||
ArrayIndexStarts.push_back(ArrayIndexVars.size());
|
||||
}
|
||||
CaptureInits.push_back(Init);
|
||||
}
|
||||
|
@ -1609,8 +1542,7 @@ ExprResult Sema::BuildLambdaExpr(SourceLocation StartLoc, SourceLocation EndLoc,
|
|||
CaptureDefault, CaptureDefaultLoc,
|
||||
Captures,
|
||||
ExplicitParams, ExplicitResultType,
|
||||
CaptureInits, ArrayIndexVars,
|
||||
ArrayIndexStarts, EndLoc,
|
||||
CaptureInits, EndLoc,
|
||||
ContainsUnexpandedParameterPack);
|
||||
// If the lambda expression's call operator is not explicitly marked constexpr
|
||||
// and we are not in a dependent context, analyze the call operator to infer
|
||||
|
|
|
@ -8167,49 +8167,29 @@ ASTReader::ReadCXXCtorInitializers(ModuleFile &F, const RecordData &Record,
|
|||
Expr *Init = ReadExpr(F);
|
||||
SourceLocation LParenLoc = ReadSourceLocation(F, Record, Idx);
|
||||
SourceLocation RParenLoc = ReadSourceLocation(F, Record, Idx);
|
||||
bool IsWritten = Record[Idx++];
|
||||
unsigned SourceOrderOrNumArrayIndices;
|
||||
SmallVector<VarDecl *, 8> Indices;
|
||||
if (IsWritten) {
|
||||
SourceOrderOrNumArrayIndices = Record[Idx++];
|
||||
} else {
|
||||
SourceOrderOrNumArrayIndices = Record[Idx++];
|
||||
Indices.reserve(SourceOrderOrNumArrayIndices);
|
||||
for (unsigned i = 0; i != SourceOrderOrNumArrayIndices; ++i)
|
||||
Indices.push_back(ReadDeclAs<VarDecl>(F, Record, Idx));
|
||||
}
|
||||
|
||||
CXXCtorInitializer *BOMInit;
|
||||
if (Type == CTOR_INITIALIZER_BASE) {
|
||||
if (Type == CTOR_INITIALIZER_BASE)
|
||||
BOMInit = new (Context)
|
||||
CXXCtorInitializer(Context, TInfo, IsBaseVirtual, LParenLoc, Init,
|
||||
RParenLoc, MemberOrEllipsisLoc);
|
||||
} else if (Type == CTOR_INITIALIZER_DELEGATING) {
|
||||
else if (Type == CTOR_INITIALIZER_DELEGATING)
|
||||
BOMInit = new (Context)
|
||||
CXXCtorInitializer(Context, TInfo, LParenLoc, Init, RParenLoc);
|
||||
} else if (IsWritten) {
|
||||
if (Member)
|
||||
BOMInit = new (Context) CXXCtorInitializer(
|
||||
Context, Member, MemberOrEllipsisLoc, LParenLoc, Init, RParenLoc);
|
||||
else
|
||||
BOMInit = new (Context)
|
||||
CXXCtorInitializer(Context, IndirectMember, MemberOrEllipsisLoc,
|
||||
LParenLoc, Init, RParenLoc);
|
||||
} else {
|
||||
if (IndirectMember) {
|
||||
assert(Indices.empty() && "Indirect field improperly initialized");
|
||||
BOMInit = new (Context)
|
||||
CXXCtorInitializer(Context, IndirectMember, MemberOrEllipsisLoc,
|
||||
LParenLoc, Init, RParenLoc);
|
||||
} else {
|
||||
BOMInit = CXXCtorInitializer::Create(
|
||||
Context, Member, MemberOrEllipsisLoc, LParenLoc, Init, RParenLoc,
|
||||
Indices.data(), Indices.size());
|
||||
}
|
||||
else if (Member)
|
||||
BOMInit = new (Context)
|
||||
CXXCtorInitializer(Context, Member, MemberOrEllipsisLoc, LParenLoc,
|
||||
Init, RParenLoc);
|
||||
else
|
||||
BOMInit = new (Context)
|
||||
CXXCtorInitializer(Context, IndirectMember, MemberOrEllipsisLoc,
|
||||
LParenLoc, Init, RParenLoc);
|
||||
|
||||
if (bool IsWritten = Record[Idx++]) {
|
||||
unsigned SourceOrder = Record[Idx++];
|
||||
BOMInit->setSourceOrder(SourceOrder);
|
||||
}
|
||||
|
||||
if (IsWritten)
|
||||
BOMInit->setSourceOrder(SourceOrderOrNumArrayIndices);
|
||||
CtorInitializers[i] = BOMInit;
|
||||
}
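After this change the reader and the writer (see EmitCXXCtorInitializers below) agree on a simpler per-initializer record: the kind-specific fields, the init expression, the paren locations, an isWritten flag, and a source order that is present only when that flag is set; the variable-length run of array-index decl references is gone. A comment-form summary of the tail read above (a restatement of the code, not an authoritative format description):

  // Expr *Init                         <- ReadExpr
  // SourceLocation LParenLoc           <- ReadSourceLocation
  // SourceLocation RParenLoc           <- ReadSourceLocation
  // bool IsWritten                     <- Record[Idx++]
  // unsigned SourceOrder, if IsWritten <- Record[Idx++]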
|
||||
|
||||
|
|
|
@ -1287,7 +1287,6 @@ void ASTStmtReader::VisitLambdaExpr(LambdaExpr *E) {
|
|||
VisitExpr(E);
|
||||
unsigned NumCaptures = Record[Idx++];
|
||||
assert(NumCaptures == E->NumCaptures);(void)NumCaptures;
|
||||
unsigned NumArrayIndexVars = Record[Idx++];
|
||||
E->IntroducerRange = ReadSourceRange(Record, Idx);
|
||||
E->CaptureDefault = static_cast<LambdaCaptureDefault>(Record[Idx++]);
|
||||
E->CaptureDefaultLoc = ReadSourceLocation(Record, Idx);
|
||||
|
@ -1300,17 +1299,6 @@ void ASTStmtReader::VisitLambdaExpr(LambdaExpr *E) {
|
|||
CEnd = E->capture_init_end();
|
||||
C != CEnd; ++C)
|
||||
*C = Reader.ReadSubExpr();
|
||||
|
||||
// Read array capture index variables.
|
||||
if (NumArrayIndexVars > 0) {
|
||||
unsigned *ArrayIndexStarts = E->getArrayIndexStarts();
|
||||
for (unsigned I = 0; I != NumCaptures + 1; ++I)
|
||||
ArrayIndexStarts[I] = Record[Idx++];
|
||||
|
||||
VarDecl **ArrayIndexVars = E->getArrayIndexVars();
|
||||
for (unsigned I = 0; I != NumArrayIndexVars; ++I)
|
||||
ArrayIndexVars[I] = ReadDeclAs<VarDecl>(Record, Idx);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
|
@ -3875,9 +3863,7 @@ Stmt *ASTReader::ReadStmtFromStream(ModuleFile &F) {
|
|||
|
||||
case EXPR_LAMBDA: {
|
||||
unsigned NumCaptures = Record[ASTStmtReader::NumExprFields];
|
||||
unsigned NumArrayIndexVars = Record[ASTStmtReader::NumExprFields + 1];
|
||||
S = LambdaExpr::CreateDeserialized(Context, NumCaptures,
|
||||
NumArrayIndexVars);
|
||||
S = LambdaExpr::CreateDeserialized(Context, NumCaptures);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5588,13 +5588,8 @@ EmitCXXCtorInitializers(ASTWriter &W,
|
|||
Writer.AddSourceLocation(Init->getLParenLoc());
|
||||
Writer.AddSourceLocation(Init->getRParenLoc());
|
||||
Writer.push_back(Init->isWritten());
|
||||
if (Init->isWritten()) {
|
||||
if (Init->isWritten())
|
||||
Writer.push_back(Init->getSourceOrder());
|
||||
} else {
|
||||
Writer.push_back(Init->getNumArrayIndices());
|
||||
for (auto *VD : Init->getArrayIndices())
|
||||
Writer.AddDeclRef(VD);
|
||||
}
|
||||
}
|
||||
|
||||
return Writer.Emit(serialization::DECL_CXX_CTOR_INITIALIZERS);
|
||||
|
|
|
@ -1257,10 +1257,6 @@ void ASTStmtWriter::VisitCXXTemporaryObjectExpr(CXXTemporaryObjectExpr *E) {
|
|||
void ASTStmtWriter::VisitLambdaExpr(LambdaExpr *E) {
|
||||
VisitExpr(E);
|
||||
Record.push_back(E->NumCaptures);
|
||||
unsigned NumArrayIndexVars = 0;
|
||||
if (E->HasArrayIndexVars)
|
||||
NumArrayIndexVars = E->getArrayIndexStarts()[E->NumCaptures];
|
||||
Record.push_back(NumArrayIndexVars);
|
||||
Record.AddSourceRange(E->IntroducerRange);
|
||||
Record.push_back(E->CaptureDefault); // FIXME: stable encoding
|
||||
Record.AddSourceLocation(E->CaptureDefaultLoc);
|
||||
|
@ -1275,15 +1271,6 @@ void ASTStmtWriter::VisitLambdaExpr(LambdaExpr *E) {
|
|||
Record.AddStmt(*C);
|
||||
}
|
||||
|
||||
// Add array index variables, if any.
|
||||
if (NumArrayIndexVars) {
|
||||
Record.append(E->getArrayIndexStarts(),
|
||||
E->getArrayIndexStarts() + E->NumCaptures + 1);
|
||||
VarDecl **ArrayIndexVars = E->getArrayIndexVars();
|
||||
for (unsigned I = 0; I != NumArrayIndexVars; ++I)
|
||||
Record.AddDeclRef(ArrayIndexVars[I]);
|
||||
}
|
||||
|
||||
Code = serialization::EXPR_LAMBDA;
|
||||
}
|
||||
|
||||
|
|
|
@ -507,7 +507,7 @@ void ExprEngine::ProcessInitializer(const CFGInitializer Init,
|
|||
}
|
||||
|
||||
SVal InitVal;
|
||||
if (BMI->getNumArrayIndices() > 0) {
|
||||
if (Init->getType()->isArrayType()) {
|
||||
// Handle arrays of trivial type. We can represent this with a
|
||||
// primitive load/copy from the base array region.
|
||||
const ArraySubscriptExpr *ASE;
|
||||
|
|
|
@ -2073,11 +2073,10 @@ RegionStoreManager::bindArray(RegionBindingsConstRef B,
|
|||
if (Init.getAs<nonloc::LazyCompoundVal>())
|
||||
return bindAggregate(B, R, Init);
|
||||
|
||||
// Remaining case: explicit compound values.
|
||||
|
||||
if (Init.isUnknown())
|
||||
return setImplicitDefaultValue(B, R, ElementTy);
|
||||
return bindAggregate(B, R, UnknownVal());
|
||||
|
||||
// Remaining case: explicit compound values.
|
||||
const nonloc::CompoundVal& CV = Init.castAs<nonloc::CompoundVal>();
|
||||
nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
|
||||
uint64_t i = 0;
|
||||
|
|
|
@ -36,8 +36,11 @@ DefinedOrUnknownSVal SValBuilder::makeZeroVal(QualType type) {
|
|||
if (type->isIntegralOrEnumerationType())
|
||||
return makeIntVal(0, type);
|
||||
|
||||
if (type->isArrayType() || type->isRecordType() || type->isVectorType() ||
|
||||
type->isAnyComplexType())
|
||||
return makeCompoundVal(type, BasicVals.getEmptySValList());
|
||||
|
||||
// FIXME: Handle floats.
|
||||
// FIXME: Handle structs.
|
||||
return UnknownVal();
|
||||
}
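Taken together, the RegionStore and SValBuilder changes let the analyzer keep "this array is zero-initialized" (an empty CompoundVal, whose missing elements read back as the default zero) distinct from "this array is unknown". A hedged example of analyzed code where the distinction shows up (an illustration, not a test from this patch):

  struct Samples {
    int values[4];
  };

  int lastSample() {
    Samples zeroed = {};    // 'values' is known to be zero-initialized
    Samples copy = zeroed;  // copies the array member
    return copy.values[3];  // should now be modeled as 0, not Unknown
  }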
|
||||
|
||||
|
|
|
@ -110,8 +110,8 @@ void move_VirtualWithEmptyBase(VirtualWithEmptyBase &x, VirtualWithEmptyBase &y)
|
|||
// CHECK-CTOR: call {{.*}} @_ZN1FC1EOS_
|
||||
// CHECK-CTOR: call {{.*}} @_ZN1EC1EOS_
|
||||
// array loop
|
||||
// CHECK-CTOR: br i1
|
||||
// CHECK-CTOR: call {{.*}} @_ZN1FC1EOS_
|
||||
// CHECK-CTOR: br i1
|
||||
|
||||
// CHECK-CTOR: define linkonce_odr {{.*}} @_ZN1GC2EOS_
|
||||
// CHECK-CTOR: call {{.*}} @_ZN1EC1EOS_
|
||||
|
|
|
@ -207,20 +207,17 @@ namespace PR10720 {
|
|||
|
||||
// CHECK-PR10720-LABEL: define linkonce_odr void @_ZN7PR107205pair2C2ERKS0_
|
||||
// CHECK-PR10720-NOT: ret
|
||||
// CHECK-PR10720: load
|
||||
// CHECK-PR10720: icmp ult
|
||||
// CHECK-PR10720-NEXT: br i1
|
||||
// CHECK-PR10720: call void @_ZN7PR107201XC1ERKS0_
|
||||
// CHECK-PR10720-NEXT: br label
|
||||
// CHECK-PR10720: icmp eq
|
||||
// CHECK-PR10720-NEXT: br i1
|
||||
// CHECK-PR10720: ret void
|
||||
|
||||
// CHECK-PR10720-LABEL: define linkonce_odr void @_ZN7PR107205pair2C2EOS0_
|
||||
// CHECK-PR10720-NOT: ret
|
||||
// CHECK-PR10720: load
|
||||
// CHECK-PR10720: icmp ult
|
||||
// CHECK-PR10720-NEXT: br i1
|
||||
// CHECK-PR10720: call void @_ZN7PR107201XC1EOS0_
|
||||
// CHECK-PR10720-NEXT: br label
|
||||
// CHECK-PR10720: icmp eq
|
||||
// CHECK-PR10720-NEXT: br i1
|
||||
// CHECK-PR10720: ret void
|
||||
pair2(pair2&&) = default;
|
||||
|
||||
|
|
|
@ -48,12 +48,14 @@ void f(D d) {
|
|||
// CHECK: call void @_ZN1BC2ERS_
|
||||
// CHECK: {{call void @llvm.memcpy.p0i8.p0i8.i64.*i64 28}}
|
||||
// CHECK: call void @_ZN1BC1ERS_
|
||||
// CHECK: br
|
||||
// CHECK: {{icmp ult.*, 2}}
|
||||
// CHECK: {{icmp ult.*, 3}}
|
||||
// CHECK: br label
|
||||
// CHECK: call void @_ZN1AC1Ev
|
||||
// CHECK: call void @_ZN1CC1ERS_1A
|
||||
// CHECK: call void @_ZN1AD1Ev
|
||||
// CHECK: {{icmp eq.*, 3}}
|
||||
// CHECK: br i1
|
||||
// CHECK: {{icmp eq.*, 2}}
|
||||
// CHECK: br i1
|
||||
// CHECK: {{call void @llvm.memcpy.p0i8.p0i8.i64.*i64 300}}
|
||||
// CHECK: ret void
|
||||
|
||||
|
|
|
@ -54,12 +54,14 @@ int c(int x) { return [&x]{return x;}(); }
|
|||
// CHECK: ret i32
|
||||
|
||||
struct D { D(); D(const D&); int x; };
|
||||
int d(int x) { D y[10]; [x,y] { return y[x].x; }(); }
|
||||
int d(int x) { D y[10]; return [x,y] { return y[x].x; }(); }
|
||||
|
||||
// CHECK-LABEL: define i32 @_Z1di
|
||||
// CHECK: call void @_ZN1DC1Ev
|
||||
// CHECK: icmp ult i64 %{{.*}}, 10
|
||||
// CHECK: br label
|
||||
// CHECK: call void @_ZN1DC1ERKS_
|
||||
// CHECK: icmp eq i64 %{{.*}}, 10
|
||||
// CHECK: br i1
|
||||
// CHECK: call i32 @"_ZZ1diENK3$_4clEv"
|
||||
// CHECK-LABEL: define internal i32 @"_ZZ1diENK3$_4clEv"
|
||||
// CHECK: load i32, i32*
|
||||
|
@ -67,7 +69,7 @@ int d(int x) { D y[10]; [x,y] { return y[x].x; }(); }
|
|||
// CHECK: ret i32
|
||||
|
||||
struct E { E(); E(const E&); ~E(); int x; };
|
||||
int e(E a, E b, bool cond) { [a,b,cond](){ return (cond ? a : b).x; }(); }
|
||||
int e(E a, E b, bool cond) { return [a,b,cond](){ return (cond ? a : b).x; }(); }
|
||||
// CHECK-LABEL: define i32 @_Z1e1ES_b
|
||||
// CHECK: call void @_ZN1EC1ERKS_
|
||||
// CHECK: invoke void @_ZN1EC1ERKS_
|
||||
|
@ -127,42 +129,66 @@ namespace pr28595 {
|
|||
~A();
|
||||
};
|
||||
|
||||
void after_init() noexcept;
|
||||
|
||||
// CHECK-LABEL: define void @_ZN7pr285954testEv()
|
||||
void test() {
|
||||
// CHECK: [[ARRAY:%.*]] = alloca [3 x [5 x [[A:%.*]]]], align 1
|
||||
// CHECK: [[DESTIDX:%.*]] = alloca i64, align 8
|
||||
// CHECK: [[I0:%.*]] = alloca i64, align 8
|
||||
// CHECK: [[I1:%.*]] = alloca i64, align 8
|
||||
// CHECK: %[[SRC:.*]] = alloca [3 x [5 x %[[A:.*]]]], align 1
|
||||
A array[3][5];
|
||||
|
||||
// CHECK: [[DESTBASE:%.*]] = bitcast [3 x [5 x [[A]]]]* {{.*}} to [[A]]*
|
||||
// CHECK: store i64 0, i64* [[DESTIDX]], align 8
|
||||
// CHECK: store i64 0, i64* [[I0]], align 8
|
||||
// Skip over the initialization loop.
|
||||
// CHECK: call {{.*}}after_init
|
||||
after_init();
|
||||
|
||||
// CHECK: %[[DST_0:.*]] = getelementptr inbounds [3 x [5 x %[[A]]]], {{.*}}, i64 0, i64 0
|
||||
// CHECK: br label
|
||||
// CHECK: icmp ult
|
||||
// CHECK: store i64 0, i64* [[I1]], align 8
|
||||
// CHECK: %[[I:.*]] = phi i64 [ 0, %{{.*}} ], [ %[[I_NEXT:.*]], {{.*}} ]
|
||||
// CHECK: %[[DST_I:.*]] = getelementptr {{.*}} [5 x %[[A]]]* %[[DST_0]], i64 %[[I]]
|
||||
// CHECK: %[[SRC_I:.*]] = getelementptr {{.*}} [3 x [5 x %[[A]]]]* %[[SRC]], i64 0, i64 %[[I]]
|
||||
//
|
||||
// CHECK: %[[DST_I_0:.*]] = getelementptr {{.*}} [5 x %[[A]]]* %[[DST_I]], i64 0, i64 0
|
||||
// CHECK: br label
|
||||
// CHECK: icmp ult
|
||||
// CHECK: [[T0:%.*]] = load i64, i64* [[DESTIDX]], align 8
|
||||
// CHECK: [[DEST:%.*]] = getelementptr inbounds [[A]], [[A]]* [[DESTBASE]], i64 [[T0]]
|
||||
// CHECK: %[[J:.*]] = phi i64 [ 0, %{{.*}} ], [ %[[J_NEXT:.*]], {{.*}} ]
|
||||
// CHECK: %[[DST_I_J:.*]] = getelementptr {{.*}} %[[A]]* %[[DST_I_0]], i64 %[[J]]
|
||||
// CHECK: %[[SRC_I_J:.*]] = getelementptr {{.*}} [5 x %[[A]]]* %[[SRC_I]], i64 0, i64 %[[J]]
|
||||
//
|
||||
// CHECK: invoke void @_ZN7pr285954TempC1Ev
|
||||
// CHECK: invoke void @_ZN7pr285951AC1ERKS0_RKNS_4TempE
|
||||
// CHECK: invoke void @_ZN7pr285954TempD1Ev
|
||||
//
|
||||
// CHECK: add nuw i64 %[[J]], 1
|
||||
// CHECK: icmp eq
|
||||
// CHECK: br i1
|
||||
//
|
||||
// CHECK: add nuw i64 %[[I]], 1
|
||||
// CHECK: icmp eq
|
||||
// CHECK: br i1
|
||||
//
|
||||
// CHECK: ret void
|
||||
//
|
||||
// CHECK: landingpad
|
||||
// CHECK: landingpad
|
||||
// CHECK: br label [[CLEANUP:%.*]]{{$}}
|
||||
// CHECK: br label %[[CLEANUP:.*]]{{$}}
|
||||
// CHECK: landingpad
|
||||
// CHECK: invoke void @_ZN7pr285954TempD1Ev
|
||||
// CHECK: br label [[CLEANUP]]
|
||||
// CHECK: icmp eq [[A]]* [[DESTBASE]], [[DEST]]
|
||||
// CHECK: [[T0:%.*]] = phi [[A]]*
|
||||
// CHECK: [[T1:%.*]] = getelementptr inbounds [[A]], [[A]]* [[T0]], i64 -1
|
||||
// CHECK: call void @_ZN7pr285951AD1Ev([[A]]* [[T1]])
|
||||
// CHECK: icmp eq [[A]]* [[T1]], [[DESTBASE]]
|
||||
// CHECK: br label %[[CLEANUP]]
|
||||
//
|
||||
// FIXME: only emit a single cleanup loop here
|
||||
// CHECK: [[CLEANUP]]:
|
||||
// CHECK: icmp eq %[[A]]* %[[DST_I_0]], %[[DST_I_J]]
|
||||
// CHECK: %[[T0:.*]] = phi %[[A]]*
|
||||
// CHECK: %[[T1:.*]] = getelementptr inbounds %[[A]], %[[A]]* %[[T0]], i64 -1
|
||||
// CHECK: call void @_ZN7pr285951AD1Ev(%[[A]]* %[[T1]])
|
||||
// CHECK: icmp eq %[[A]]* %[[T1]], %[[DST_I_0]]
|
||||
//
|
||||
// CHECK: %[[BEGIN:.*]] = getelementptr {{.*}} %[[DST_0]], i64 0, i64 0
|
||||
// CHECK: %[[END:.*]] = getelementptr {{.*}} %[[DST_I]], i64 0, i64 0
|
||||
// CHECK: icmp eq %[[A]]* %[[BEGIN]], %[[END]]
|
||||
// CHECK: %[[T0:.*]] = phi %[[A]]*
|
||||
// CHECK: %[[T1:.*]] = getelementptr inbounds %[[A]], %[[A]]* %[[T0]], i64 -1
|
||||
// CHECK: call void @_ZN7pr285951AD1Ev(%[[A]]* %[[T1]])
|
||||
// CHECK: icmp eq %[[A]]* %[[T1]], %[[BEGIN]]
|
||||
(void) [array]{};
|
||||
|
||||
// Skip over the initialization loop.
|
||||
// CHECK: [[BEGIN:%.*]] = getelementptr inbounds [3 x [5 x [[A]]]], [3 x [5 x [[A]]]]* [[ARRAY]], i32 0, i32 0, i32 0
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -141,10 +141,9 @@ void test_ObjCBlockMember_copy_assign(ObjCBlockMember m1, ObjCBlockMember m2) {
|
|||
|
||||
// Implicitly-generated copy constructor for ObjCArrayMember
|
||||
// CHECK-LABEL: define linkonce_odr void @_ZN15ObjCArrayMemberC2ERKS_
|
||||
// CHECK: br i1
|
||||
// CHECK: call i8* @objc_retain
|
||||
// CHECK-NEXT: store i8*
|
||||
// CHECK-NEXT: br label
|
||||
// CHECK: br i1
|
||||
// CHECK: ret
|
||||
|
||||
// Implicitly-generated default constructor for ObjCBlockMember
|
||||
|
|
|
@ -49,12 +49,14 @@ void f(D d) {
|
|||
// CHECK: call void @_ZN1BC2ERS_
|
||||
// CHECK: {{call void @llvm.memcpy.p0i8.p0i8.i64.*i64 24}}
|
||||
// CHECK: call void @_ZN1BC1ERS_
|
||||
// CHECK: br
|
||||
// CHECK: {{icmp ult.*, 2}}
|
||||
// CHECK: {{icmp ult.*, 3}}
|
||||
// CHECK: br label
|
||||
// CHECK: call void @_ZN1AC1Ev
|
||||
// CHECK: call void @_ZN1CC1ERS_1A
|
||||
// CHECK: call void @_ZN1AD1Ev
|
||||
// CHECK: {{icmp eq.*, 3}}
|
||||
// CHECK: br i1
|
||||
// CHECK: {{icmp eq.*, 2}}
|
||||
// CHECK: br i1
|
||||
// CHECK: {{call.*@objc_memmove_collectable}}
|
||||
// CHECK: {{call void @llvm.memcpy.p0i8.p0i8.i64.*i64 12}}
|
||||
// CHECK: ret void
|
||||
|
|
|
@ -566,6 +566,21 @@ struct ArrayRVal {
|
|||
};
|
||||
static_assert(ArrayRVal().elems[3].f() == 0, "");
|
||||
|
||||
namespace CopyCtor {
|
||||
struct A {
|
||||
constexpr A() {}
|
||||
constexpr A(const A &) {}
|
||||
};
|
||||
struct B {
|
||||
A a;
|
||||
int arr[10];
|
||||
};
|
||||
constexpr B b{{}, {1, 2, 3, 4, 5}};
|
||||
constexpr B c = b;
|
||||
static_assert(c.arr[2] == 3, "");
|
||||
static_assert(c.arr[7] == 0, "");
|
||||
}
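The asserts above check both that explicitly written elements survive the constexpr copy and that the trailing, implicitly zero-initialized elements do too. An illustrative extension (not part of the patch) that the same machinery should also accept, covering a nested array member:

  namespace CopyCtorNested {
    struct A {
      constexpr A() {}
      constexpr A(const A &) {}
    };
    struct C2 { A a; int grid[2][3]; };
    constexpr C2 g{{}, {{1, 2, 3}, {4, 5, 6}}};
    constexpr C2 h = g;                 // non-trivial implicit constexpr copy
    static_assert(h.grid[1][2] == 6, "");
    static_assert(h.grid[0][1] == 2, "");
  }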
|
||||
|
||||
constexpr int selfref[2][2][2] = {
|
||||
selfref[1][1][1] + 1, selfref[0][0][0] + 1,
|
||||
selfref[1][0][1] + 1, selfref[0][1][0] + 1,
|
||||
|
|
|
@ -133,25 +133,6 @@ TEST(RecursiveASTVisitor, AttributesAreVisited) {
|
|||
"};\n"));
|
||||
}
|
||||
|
||||
// Check to ensure that VarDecls are visited.
|
||||
class VarDeclVisitor : public ExpectedLocationVisitor<VarDeclVisitor> {
|
||||
public:
|
||||
bool VisitVarDecl(VarDecl *VD) {
|
||||
Match(VD->getNameAsString(), VD->getLocStart());
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
TEST(RecursiveASTVisitor, ArrayInitializersAreVisited) {
|
||||
VarDeclVisitor Visitor;
|
||||
Visitor.ExpectMatch("__i0", 1, 8);
|
||||
EXPECT_TRUE(
|
||||
Visitor.runOver("struct MyClass {\n"
|
||||
" int c[1];\n"
|
||||
" static MyClass Create() { return MyClass(); }\n"
|
||||
"};\n"));
|
||||
}
|
||||
|
||||
// Check to ensure that implicit default argument expressions are visited.
|
||||
class IntegerLiteralVisitor
|
||||
: public ExpectedLocationVisitor<IntegerLiteralVisitor> {
|
||||
|
|