//===- ObjCARCOpts.cpp - ObjC ARC Optimization ----------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines ObjC ARC optimizations. ARC stands for Automatic
/// Reference Counting and is a system for managing reference counts for objects
/// in Objective C.
///
/// The optimizations performed include elimination of redundant, partially
/// redundant, and inconsequential reference count operations, elimination of
/// redundant weak pointer operations, and numerous minor simplifications.
///
/// WARNING: This file knows about certain library functions. It recognizes them
/// by name, and hardwires knowledge of their semantics.
///
/// WARNING: This file knows about how certain Objective-C library functions are
/// used. Naive LLVM IR transformations which would otherwise be
/// behavior-preserving may break these assumptions.
///
//===----------------------------------------------------------------------===//

#include "ObjCARC.h"
#include "ARCRuntimeEntryPoints.h"
#include "DependencyAnalysis.h"
#include "ObjCARCAliasAnalysis.h"
#include "ProvenanceAnalysis.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;
using namespace llvm::objcarc;

#define DEBUG_TYPE "objc-arc-opts"

/// \defgroup MiscUtils Miscellaneous utilities that are not ARC specific.
/// @{

namespace {
  /// \brief An associative container with fast insertion-order (deterministic)
  /// iteration over its elements. Plus the special blot operation.
  template<class KeyT, class ValueT>
  class MapVector {
    /// Map keys to indices in Vector.
    typedef DenseMap<KeyT, size_t> MapTy;
    MapTy Map;

    typedef std::vector<std::pair<KeyT, ValueT> > VectorTy;
    /// Keys and values.
    VectorTy Vector;

  public:
    typedef typename VectorTy::iterator iterator;
    typedef typename VectorTy::const_iterator const_iterator;
    iterator begin() { return Vector.begin(); }
    iterator end() { return Vector.end(); }
    const_iterator begin() const { return Vector.begin(); }
    const_iterator end() const { return Vector.end(); }

#ifdef XDEBUG
    ~MapVector() {
      assert(Vector.size() >= Map.size()); // May differ due to blotting.
      for (typename MapTy::const_iterator I = Map.begin(), E = Map.end();
           I != E; ++I) {
        assert(I->second < Vector.size());
        assert(Vector[I->second].first == I->first);
      }
      for (typename VectorTy::const_iterator I = Vector.begin(),
           E = Vector.end(); I != E; ++I)
        assert(!I->first ||
               (Map.count(I->first) &&
                Map[I->first] == size_t(I - Vector.begin())));
    }
#endif

    ValueT &operator[](const KeyT &Arg) {
      std::pair<typename MapTy::iterator, bool> Pair =
        Map.insert(std::make_pair(Arg, size_t(0)));
      if (Pair.second) {
        size_t Num = Vector.size();
        Pair.first->second = Num;
        Vector.push_back(std::make_pair(Arg, ValueT()));
        return Vector[Num].second;
      }
      return Vector[Pair.first->second].second;
    }

    std::pair<iterator, bool>
    insert(const std::pair<KeyT, ValueT> &InsertPair) {
      std::pair<typename MapTy::iterator, bool> Pair =
        Map.insert(std::make_pair(InsertPair.first, size_t(0)));
      if (Pair.second) {
        size_t Num = Vector.size();
        Pair.first->second = Num;
        Vector.push_back(InsertPair);
        return std::make_pair(Vector.begin() + Num, true);
      }
      return std::make_pair(Vector.begin() + Pair.first->second, false);
    }

    iterator find(const KeyT &Key) {
      typename MapTy::iterator It = Map.find(Key);
      if (It == Map.end()) return Vector.end();
      return Vector.begin() + It->second;
    }

    const_iterator find(const KeyT &Key) const {
      typename MapTy::const_iterator It = Map.find(Key);
      if (It == Map.end()) return Vector.end();
      return Vector.begin() + It->second;
    }

    /// This is similar to erase, but instead of removing the element from the
    /// vector, it just zeros out the key in the vector. This leaves iterators
    /// intact, but clients must be prepared for zeroed-out keys when iterating.
    void blot(const KeyT &Key) {
      typename MapTy::iterator It = Map.find(Key);
      if (It == Map.end()) return;
      Vector[It->second].first = KeyT();
      Map.erase(It);
    }

    void clear() {
      Map.clear();
      Vector.clear();
    }
  };
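
  // Illustrative sketch (not part of the pass): blot() differs from erase() in
  // that the entry's slot stays in Vector with a default-constructed key, so
  // iterators remain valid and insertion order is preserved:
  //
  //   MapVector<const Value *, unsigned> M;
  //   M[Ptr1] = 1;
  //   M[Ptr2] = 2;
  //   M.blot(Ptr1); // Ptr1's key is zeroed; Ptr2 keeps its position.
  //   for (MapVector<const Value *, unsigned>::iterator I = M.begin(),
  //        E = M.end(); I != E; ++I)
  //     if (!I->first) continue; // Clients must skip blotted entries.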
}

/// @}
///
/// \defgroup ARCUtilities Utility declarations/definitions specific to ARC.
/// @{

/// \brief This is similar to StripPointerCastsAndObjCCalls but it stops as soon
/// as it finds a value with multiple uses.
static const Value *FindSingleUseIdentifiedObject(const Value *Arg) {
  if (Arg->hasOneUse()) {
    if (const BitCastInst *BC = dyn_cast<BitCastInst>(Arg))
      return FindSingleUseIdentifiedObject(BC->getOperand(0));
    if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Arg))
      if (GEP->hasAllZeroIndices())
        return FindSingleUseIdentifiedObject(GEP->getPointerOperand());
    if (IsForwarding(GetBasicInstructionClass(Arg)))
      return FindSingleUseIdentifiedObject(
               cast<CallInst>(Arg)->getArgOperand(0));
    if (!IsObjCIdentifiedObject(Arg))
      return 0;
    return Arg;
  }

  // If we found an identifiable object that has multiple uses, but all of
  // those uses are trivial, we can still consider this to be a single-use
  // value.
  if (IsObjCIdentifiedObject(Arg)) {
    for (const User *U : Arg->users())
      if (!U->use_empty() || StripPointerCastsAndObjCCalls(U) != Arg)
        return 0;

    return Arg;
  }

  return 0;
}
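
// Illustrative use (hypothetical IR, not from a real test case): given a chain
// such as
//
//   %obj  = call i8* @objc_retain(i8* %something)  ; forwarding call
//   %cast = bitcast i8* %obj to i8*                ; sole use of %obj
//
// starting at %cast (itself single-use, e.g. the argument of an objc_release),
// the walk above looks through the bitcast and the forwarding retain and
// continues into %something, returning it only if it is an ObjC-identified
// object with at most trivial extra uses.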

/// This is a wrapper around getUnderlyingObjCPtr along the lines of
/// GetUnderlyingObjects except that it returns early when it sees the first
/// alloca.
static inline bool AreAnyUnderlyingObjectsAnAlloca(const Value *V) {
  SmallPtrSet<const Value *, 4> Visited;
  SmallVector<const Value *, 4> Worklist;
  Worklist.push_back(V);
  do {
    const Value *P = Worklist.pop_back_val();
    P = GetUnderlyingObjCPtr(P);

    if (isa<AllocaInst>(P))
      return true;

    if (!Visited.insert(P))
      continue;

    if (const SelectInst *SI = dyn_cast<const SelectInst>(P)) {
      Worklist.push_back(SI->getTrueValue());
      Worklist.push_back(SI->getFalseValue());
      continue;
    }

    if (const PHINode *PN = dyn_cast<const PHINode>(P)) {
      for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i)
        Worklist.push_back(PN->getIncomingValue(i));
      continue;
    }
  } while (!Worklist.empty());

  return false;
}
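
// A rough sketch of why the worklist is needed (hypothetical IR): for
//
//   %p = select i1 %c, i8* %a, i8* %b
//
// both %a and %b are pushed onto the worklist, so an alloca reachable through
// either arm of the select (or through any incoming value of a phi) is found,
// while the Visited set keeps cyclic phis from looping forever.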

/// @}
///
/// \defgroup ARCOpt ARC Optimization.
/// @{

// TODO: On code like this:
//
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
// stuff_that_cannot_release()
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
//
// The second retain and autorelease can be deleted.

// TODO: It should be possible to delete
// objc_autoreleasePoolPush and objc_autoreleasePoolPop
// pairs if nothing is actually autoreleased between them. Also, autorelease
// calls followed by objc_autoreleasePoolPop calls (perhaps in ObjC++ code
// after inlining) can be turned into plain release calls.

// TODO: Critical-edge splitting. If the optimal insertion point is
// a critical edge, the current algorithm has to fail, because it doesn't
// know how to split edges. It should be possible to make the optimizer
// think in terms of edges, rather than blocks, and then split critical
// edges on demand.

// TODO: OptimizeSequences could be generalized to be interprocedural.

// TODO: Recognize that a bunch of other objc runtime calls have
// non-escaping arguments and non-releasing arguments, and may be
// non-autoreleasing.

// TODO: Sink autorelease calls as far as possible. Unfortunately we
// usually can't sink them past other calls, which would be the main
// case where it would be useful.

// TODO: The pointer returned from objc_loadWeakRetained is retained.

// TODO: Delete release+retain pairs (rare).

STATISTIC(NumNoops,        "Number of no-op objc calls eliminated");
STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
STATISTIC(NumAutoreleases, "Number of autoreleases converted to releases");
STATISTIC(NumRets,         "Number of return value forwarding "
                           "retain+autoreleases eliminated");
STATISTIC(NumRRs,          "Number of retain+release paths eliminated");
STATISTIC(NumPeeps,        "Number of calls peephole-optimized");
#ifndef NDEBUG
STATISTIC(NumRetainsBeforeOpt,
          "Number of retains before optimization");
STATISTIC(NumReleasesBeforeOpt,
          "Number of releases before optimization");
STATISTIC(NumRetainsAfterOpt,
          "Number of retains after optimization");
STATISTIC(NumReleasesAfterOpt,
          "Number of releases after optimization");
#endif

namespace {
  /// \enum Sequence
  ///
  /// \brief A sequence of states that a pointer may go through in which an
  /// objc_retain and objc_release are actually needed.
  enum Sequence {
    S_None,
    S_Retain,        ///< objc_retain(x).
    S_CanRelease,    ///< foo(x) -- x could possibly see a ref count decrement.
    S_Use,           ///< any use of x.
    S_Stop,          ///< like S_Release, but code motion is stopped.
    S_Release,       ///< objc_release(x).
    S_MovableRelease ///< objc_release(x), !clang.imprecise_release.
  };

  raw_ostream &operator<<(raw_ostream &OS, const Sequence S)
    LLVM_ATTRIBUTE_UNUSED;
  raw_ostream &operator<<(raw_ostream &OS, const Sequence S) {
    switch (S) {
    case S_None:
      return OS << "S_None";
    case S_Retain:
      return OS << "S_Retain";
    case S_CanRelease:
      return OS << "S_CanRelease";
    case S_Use:
      return OS << "S_Use";
    case S_Release:
      return OS << "S_Release";
    case S_MovableRelease:
      return OS << "S_MovableRelease";
    case S_Stop:
      return OS << "S_Stop";
    }
    llvm_unreachable("Unknown sequence type.");
  }
}

static Sequence MergeSeqs(Sequence A, Sequence B, bool TopDown) {
  // The easy cases.
  if (A == B)
    return A;
  if (A == S_None || B == S_None)
    return S_None;

  if (A > B) std::swap(A, B);
  if (TopDown) {
    // Choose the side which is further along in the sequence.
    if ((A == S_Retain || A == S_CanRelease) &&
        (B == S_CanRelease || B == S_Use))
      return B;
  } else {
    // Choose the side which is further along in the sequence.
    if ((A == S_Use || A == S_CanRelease) &&
        (B == S_Use || B == S_Release || B == S_Stop || B == S_MovableRelease))
      return A;
    // If both sides are releases, choose the more conservative one.
    if (A == S_Stop && (B == S_Release || B == S_MovableRelease))
      return A;
    if (A == S_Release && B == S_MovableRelease)
      return A;
  }

  return S_None;
}
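
// Two illustrative merges (assuming the enum order above, with A swapped to be
// the smaller value): merging S_Retain with S_Use top-down yields S_Use, the
// state further along the sequence; merging S_Release with S_MovableRelease
// bottom-up yields S_Release, the more conservative release, since only one
// side carried the clang.imprecise_release tag. Any pairing not handled above
// collapses to S_None.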

namespace {
  /// \brief Unidirectional information about either a
  /// retain-decrement-use-release sequence or release-use-decrement-retain
  /// reverse sequence.
  struct RRInfo {
    /// After an objc_retain, the reference count of the referenced
    /// object is known to be positive. Similarly, before an objc_release, the
    /// reference count of the referenced object is known to be positive. If
    /// there are retain-release pairs in code regions where the retain count
    /// is known to be positive, they can be eliminated, regardless of any side
    /// effects between them.
    ///
    /// Also, a retain+release pair nested within another retain+release
    /// pair all on the known same pointer value can be eliminated, regardless
    /// of any intervening side effects.
    ///
    /// KnownSafe is true when either of these conditions is satisfied.
    bool KnownSafe;

    /// True if the objc_release calls are all marked with the "tail" keyword.
    bool IsTailCallRelease;

    /// If the Calls are objc_release calls and they all have a
    /// clang.imprecise_release tag, this is the metadata tag.
    MDNode *ReleaseMetadata;

    /// For a top-down sequence, the set of objc_retains or
    /// objc_retainBlocks. For bottom-up, the set of objc_releases.
    SmallPtrSet<Instruction *, 2> Calls;

    /// The set of optimal insert positions for moving calls in the opposite
    /// sequence.
    SmallPtrSet<Instruction *, 2> ReverseInsertPts;

    /// If this is true, we cannot perform code motion but can still remove
    /// retain/release pairs.
    bool CFGHazardAfflicted;

    RRInfo() :
      KnownSafe(false), IsTailCallRelease(false), ReleaseMetadata(0),
      CFGHazardAfflicted(false) {}

    void clear();

    /// Conservatively merge the two RRInfos. Returns true if a partial merge
    /// has occurred, false otherwise.
    bool Merge(const RRInfo &Other);

  };
}

void RRInfo::clear() {
  KnownSafe = false;
  IsTailCallRelease = false;
  ReleaseMetadata = 0;
  Calls.clear();
  ReverseInsertPts.clear();
  CFGHazardAfflicted = false;
}

bool RRInfo::Merge(const RRInfo &Other) {
  // Conservatively merge the ReleaseMetadata information.
  if (ReleaseMetadata != Other.ReleaseMetadata)
    ReleaseMetadata = 0;

  // Conservatively merge the boolean state.
  KnownSafe &= Other.KnownSafe;
  IsTailCallRelease &= Other.IsTailCallRelease;
  CFGHazardAfflicted |= Other.CFGHazardAfflicted;

  // Merge the call sets.
  Calls.insert(Other.Calls.begin(), Other.Calls.end());

  // Merge the insert point sets. If there are any differences,
  // that makes this a partial merge.
  bool Partial = ReverseInsertPts.size() != Other.ReverseInsertPts.size();
  for (SmallPtrSet<Instruction *, 2>::const_iterator
         I = Other.ReverseInsertPts.begin(),
         E = Other.ReverseInsertPts.end(); I != E; ++I)
    Partial |= ReverseInsertPts.insert(*I);
  return Partial;
}
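
// An illustrative case of the "partial" result: if this RRInfo has reverse
// insert points {A} and Other has {A, B}, the merged set becomes {A, B} and
// Merge() returns true, because the two paths disagreed on where the matching
// calls should be inserted; the caller then treats the sequence as partial.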

namespace {
  /// \brief This class summarizes several per-pointer runtime properties which
  /// are propagated through the flow graph.
  class PtrState {
    /// True if the reference count is known to be incremented.
    bool KnownPositiveRefCount;

    /// True if we've seen an opportunity for partial RR elimination, such as
    /// pushing calls into a CFG triangle or into one side of a CFG diamond.
    bool Partial;

    /// The current position in the sequence.
    unsigned char Seq : 8;

    /// Unidirectional information about the current sequence.
    RRInfo RRI;

  public:
    PtrState() : KnownPositiveRefCount(false), Partial(false),
                 Seq(S_None) {}

    bool IsKnownSafe() const {
      return RRI.KnownSafe;
    }

    void SetKnownSafe(const bool NewValue) {
      RRI.KnownSafe = NewValue;
    }

    bool IsTailCallRelease() const {
      return RRI.IsTailCallRelease;
    }

    void SetTailCallRelease(const bool NewValue) {
      RRI.IsTailCallRelease = NewValue;
    }

    bool IsTrackingImpreciseReleases() const {
      return RRI.ReleaseMetadata != 0;
    }

    const MDNode *GetReleaseMetadata() const {
      return RRI.ReleaseMetadata;
    }

    void SetReleaseMetadata(MDNode *NewValue) {
      RRI.ReleaseMetadata = NewValue;
    }

    bool IsCFGHazardAfflicted() const {
      return RRI.CFGHazardAfflicted;
    }

    void SetCFGHazardAfflicted(const bool NewValue) {
      RRI.CFGHazardAfflicted = NewValue;
    }

    void SetKnownPositiveRefCount() {
      DEBUG(dbgs() << "Setting Known Positive.\n");
      KnownPositiveRefCount = true;
    }

    void ClearKnownPositiveRefCount() {
      DEBUG(dbgs() << "Clearing Known Positive.\n");
      KnownPositiveRefCount = false;
    }

    bool HasKnownPositiveRefCount() const {
      return KnownPositiveRefCount;
    }

    void SetSeq(Sequence NewSeq) {
      DEBUG(dbgs() << "Old: " << Seq << "; New: " << NewSeq << "\n");
      Seq = NewSeq;
    }

    Sequence GetSeq() const {
      return static_cast<Sequence>(Seq);
    }

    void ClearSequenceProgress() {
      ResetSequenceProgress(S_None);
    }

    void ResetSequenceProgress(Sequence NewSeq) {
      DEBUG(dbgs() << "Resetting sequence progress.\n");
      SetSeq(NewSeq);
      Partial = false;
      RRI.clear();
    }

    void Merge(const PtrState &Other, bool TopDown);

    void InsertCall(Instruction *I) {
      RRI.Calls.insert(I);
    }

    void InsertReverseInsertPt(Instruction *I) {
      RRI.ReverseInsertPts.insert(I);
    }

    void ClearReverseInsertPts() {
      RRI.ReverseInsertPts.clear();
    }

    bool HasReverseInsertPts() const {
      return !RRI.ReverseInsertPts.empty();
    }

    const RRInfo &GetRRInfo() const {
      return RRI;
    }
  };
}

void
PtrState::Merge(const PtrState &Other, bool TopDown) {
  Seq = MergeSeqs(GetSeq(), Other.GetSeq(), TopDown);
  KnownPositiveRefCount &= Other.KnownPositiveRefCount;

  // If we're not in a sequence (anymore), drop all associated state.
  if (Seq == S_None) {
    Partial = false;
    RRI.clear();
  } else if (Partial || Other.Partial) {
    // If we're doing a merge on a path that's previously seen a partial
    // merge, conservatively drop the sequence, to avoid doing partial
    // RR elimination. If the branch predicates for the two merges differ,
    // mixing them is unsafe.
    ClearSequenceProgress();
  } else {
    // Otherwise merge the other PtrState's RRInfo into our RRInfo. At this
    // point, we know that currently we are not partial. Stash whether or not
    // the merge operation caused us to undergo a partial merging of reverse
    // insertion points.
    Partial = RRI.Merge(Other.RRI);
  }
}

namespace {
  /// \brief Per-BasicBlock state.
  class BBState {
    /// The number of unique control paths from the entry which can reach this
    /// block.
    unsigned TopDownPathCount;

    /// The number of unique control paths to exits from this block.
    unsigned BottomUpPathCount;

    /// A type for PerPtrTopDown and PerPtrBottomUp.
    typedef MapVector<const Value *, PtrState> MapTy;

    /// The top-down traversal uses this to record information known about a
    /// pointer at the bottom of each block.
    MapTy PerPtrTopDown;

    /// The bottom-up traversal uses this to record information known about a
    /// pointer at the top of each block.
    MapTy PerPtrBottomUp;

    /// Effective predecessors of the current block ignoring ignorable edges
    /// and ignored backedges.
    SmallVector<BasicBlock *, 2> Preds;
    /// Effective successors of the current block ignoring ignorable edges
    /// and ignored backedges.
    SmallVector<BasicBlock *, 2> Succs;

  public:
    static const unsigned OverflowOccurredValue;

    BBState() : TopDownPathCount(0), BottomUpPathCount(0) { }

    typedef MapTy::iterator ptr_iterator;
    typedef MapTy::const_iterator ptr_const_iterator;

    ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
    ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
    ptr_const_iterator top_down_ptr_begin() const {
      return PerPtrTopDown.begin();
    }
    ptr_const_iterator top_down_ptr_end() const {
      return PerPtrTopDown.end();
    }

    ptr_iterator bottom_up_ptr_begin() { return PerPtrBottomUp.begin(); }
    ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
    ptr_const_iterator bottom_up_ptr_begin() const {
      return PerPtrBottomUp.begin();
    }
    ptr_const_iterator bottom_up_ptr_end() const {
      return PerPtrBottomUp.end();
    }

    /// Mark this block as being an entry block, which has one path from the
    /// entry by definition.
    void SetAsEntry() { TopDownPathCount = 1; }

    /// Mark this block as being an exit block, which has one path to an exit
    /// by definition.
    void SetAsExit() { BottomUpPathCount = 1; }

    /// Attempt to find the PtrState object describing the top down state for
    /// pointer Arg. Return a new initialized PtrState describing the top down
    /// state for Arg if we do not find one.
    PtrState &getPtrTopDownState(const Value *Arg) {
      return PerPtrTopDown[Arg];
    }

    /// Attempt to find the PtrState object describing the bottom up state for
    /// pointer Arg. Return a new initialized PtrState describing the bottom up
    /// state for Arg if we do not find one.
    PtrState &getPtrBottomUpState(const Value *Arg) {
      return PerPtrBottomUp[Arg];
    }

    /// Attempt to find the PtrState object describing the bottom up state for
    /// pointer Arg.
    ptr_iterator findPtrBottomUpState(const Value *Arg) {
      return PerPtrBottomUp.find(Arg);
    }

    void clearBottomUpPointers() {
      PerPtrBottomUp.clear();
    }

    void clearTopDownPointers() {
      PerPtrTopDown.clear();
    }

    void InitFromPred(const BBState &Other);
    void InitFromSucc(const BBState &Other);
    void MergePred(const BBState &Other);
    void MergeSucc(const BBState &Other);

    /// Compute the number of possible unique paths from an entry to an exit
    /// which pass through this block. This is only valid after both the
    /// top-down and bottom-up traversals are complete.
    ///
    /// Returns true if overflow occurred. Returns false if overflow did not
    /// occur.
    bool GetAllPathCountWithOverflow(unsigned &PathCount) const {
      if (TopDownPathCount == OverflowOccurredValue ||
          BottomUpPathCount == OverflowOccurredValue)
        return true;
      unsigned long long Product =
        (unsigned long long)TopDownPathCount*BottomUpPathCount;
      // Overflow occurred if any of the upper bits of Product are set or if
      // all the lower bits of Product are set.
      return (Product >> 32) ||
             ((PathCount = Product) == OverflowOccurredValue);
    }
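
    // A small worked example (the numbers are illustrative): with
    // TopDownPathCount = 3 and BottomUpPathCount = 2, Product is 6, no upper
    // bits are set, and PathCount is set to 6. If the product reached
    // 0xffffffff (OverflowOccurredValue) or did not fit in 32 bits, the
    // function would report overflow instead of producing a usable count.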

    // Specialized CFG utilities.
    typedef SmallVectorImpl<BasicBlock *>::const_iterator edge_iterator;
    edge_iterator pred_begin() const { return Preds.begin(); }
    edge_iterator pred_end() const { return Preds.end(); }
    edge_iterator succ_begin() const { return Succs.begin(); }
    edge_iterator succ_end() const { return Succs.end(); }

    void addSucc(BasicBlock *Succ) { Succs.push_back(Succ); }
    void addPred(BasicBlock *Pred) { Preds.push_back(Pred); }

    bool isExit() const { return Succs.empty(); }
  };

  const unsigned BBState::OverflowOccurredValue = 0xffffffff;
}

void BBState::InitFromPred(const BBState &Other) {
  PerPtrTopDown = Other.PerPtrTopDown;
  TopDownPathCount = Other.TopDownPathCount;
}

void BBState::InitFromSucc(const BBState &Other) {
  PerPtrBottomUp = Other.PerPtrBottomUp;
  BottomUpPathCount = Other.BottomUpPathCount;
}

/// The top-down traversal uses this to merge information about predecessors to
/// form the initial state for a new block.
void BBState::MergePred(const BBState &Other) {
  if (TopDownPathCount == OverflowOccurredValue)
    return;

  // Other.TopDownPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  TopDownPathCount += Other.TopDownPathCount;

  // In order to be consistent, we clear the top down pointers when the
  // addition causes TopDownPathCount to become OverflowOccurredValue, even
  // though "true" overflow has not occurred.
  if (TopDownPathCount == OverflowOccurredValue) {
    clearTopDownPointers();
    return;
  }

  // Check for overflow. If we have overflow, fall back to conservative
  // behavior.
  if (TopDownPathCount < Other.TopDownPathCount) {
    TopDownPathCount = OverflowOccurredValue;
    clearTopDownPointers();
    return;
  }

  // For each entry in the other set, if our set has an entry with the same
  // key, merge the entries. Otherwise, copy the entry and merge it with an
  // empty entry.
  for (ptr_const_iterator MI = Other.top_down_ptr_begin(),
       ME = Other.top_down_ptr_end(); MI != ME; ++MI) {
    std::pair<ptr_iterator, bool> Pair = PerPtrTopDown.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/true);
  }

  // For each entry in our set, if the other set doesn't have an entry with the
  // same key, force it to merge with an empty entry.
  for (ptr_iterator MI = top_down_ptr_begin(),
       ME = top_down_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
      MI->second.Merge(PtrState(), /*TopDown=*/true);
}

/// The bottom-up traversal uses this to merge information about successors to
/// form the initial state for a new block.
void BBState::MergeSucc(const BBState &Other) {
  if (BottomUpPathCount == OverflowOccurredValue)
    return;

  // Other.BottomUpPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  BottomUpPathCount += Other.BottomUpPathCount;

  // In order to be consistent, we clear the bottom up pointers when the
  // addition causes BottomUpPathCount to become OverflowOccurredValue, even
  // though "true" overflow has not occurred.
  if (BottomUpPathCount == OverflowOccurredValue) {
    clearBottomUpPointers();
    return;
  }

  // Check for overflow. If we have overflow, fall back to conservative
  // behavior.
  if (BottomUpPathCount < Other.BottomUpPathCount) {
    BottomUpPathCount = OverflowOccurredValue;
    clearBottomUpPointers();
    return;
  }

  // For each entry in the other set, if our set has an entry with the
  // same key, merge the entries. Otherwise, copy the entry and merge
  // it with an empty entry.
  for (ptr_const_iterator MI = Other.bottom_up_ptr_begin(),
       ME = Other.bottom_up_ptr_end(); MI != ME; ++MI) {
    std::pair<ptr_iterator, bool> Pair = PerPtrBottomUp.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/false);
  }

  // For each entry in our set, if the other set doesn't have an entry
  // with the same key, force it to merge with an empty entry.
  for (ptr_iterator MI = bottom_up_ptr_begin(),
       ME = bottom_up_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
      MI->second.Merge(PtrState(), /*TopDown=*/false);
}

// Only enable ARC Annotations if we are building a debug version of
// libObjCARCOpts.
#ifndef NDEBUG
#define ARC_ANNOTATIONS
#endif

// Define some macros along the lines of DEBUG and some helper functions to make
// it cleaner to create annotations in the source code and to no-op when not
// building in debug mode.
#ifdef ARC_ANNOTATIONS

#include "llvm/Support/CommandLine.h"

/// Enable/disable ARC sequence annotations.
static cl::opt<bool>
EnableARCAnnotations("enable-objc-arc-annotations", cl::init(false),
                     cl::desc("Enable emission of arc data flow analysis "
                              "annotations"));
static cl::opt<bool>
DisableCheckForCFGHazards("disable-objc-arc-checkforcfghazards", cl::init(false),
                          cl::desc("Disable check for cfg hazards when "
                                   "annotating"));
static cl::opt<std::string>
ARCAnnotationTargetIdentifier("objc-arc-annotation-target-identifier",
                              cl::init(""),
                              cl::desc("filter out all data flow annotations "
                                       "but those that apply to the given "
                                       "target llvm identifier."));

/// This function appends a unique ARCAnnotationProvenanceSourceMDKind id to an
/// instruction so that we can track backwards when post processing via the llvm
/// arc annotation processor tool.
static MDString *AppendMDNodeToSourcePtr(unsigned NodeId,
                                         Value *Ptr) {
  MDString *Hash = 0;

  // If pointer is a result of an instruction and it does not yet have a source
  // MDNode attached to it, attach a new MDNode onto it. If pointer is a result
  // of an instruction and does have a source MDNode attached to it, return a
  // reference to said Node. Otherwise just return 0.
  if (Instruction *Inst = dyn_cast<Instruction>(Ptr)) {
    MDNode *Node;
    if (!(Node = Inst->getMetadata(NodeId))) {
      // We do not have any node. Generate and attach the hash MDString to the
      // instruction.

      // We just use an MDString to ensure that this metadata gets written out
      // of line at the module level and to provide a very simple format
      // encoding the information herein. Both of these make it simpler for a
      // simple external program to parse the annotations.
      std::string Str;
      raw_string_ostream os(Str);
      os << "(" << Inst->getParent()->getParent()->getName() << ",%"
         << Inst->getName() << ")";

      Hash = MDString::get(Inst->getContext(), os.str());
      Inst->setMetadata(NodeId, MDNode::get(Inst->getContext(), Hash));
    } else {
      // We have a node. Grab its hash and return it.
      assert(Node->getNumOperands() == 1 &&
             "An ARCAnnotationProvenanceSourceMDKind can only have 1 operand.");
      Hash = cast<MDString>(Node->getOperand(0));
    }
  } else if (Argument *Arg = dyn_cast<Argument>(Ptr)) {
    std::string str;
    raw_string_ostream os(str);
    os << "(" << Arg->getParent()->getName() << ",%" << Arg->getName()
       << ")";
    Hash = MDString::get(Arg->getContext(), os.str());
  }

  return Hash;
}

static std::string SequenceToString(Sequence A) {
  std::string str;
  raw_string_ostream os(str);
  os << A;
  return os.str();
}

/// Helper function to change a Sequence into a String object using our overload
/// for raw_ostream so we only have printing code in one location.
static MDString *SequenceToMDString(LLVMContext &Context,
                                    Sequence A) {
  return MDString::get(Context, SequenceToString(A));
}

/// A simple function to generate a MDNode which describes the change in state
/// for Value *Ptr caused by Instruction *Inst.
static void AppendMDNodeToInstForPtr(unsigned NodeId,
                                     Instruction *Inst,
                                     Value *Ptr,
                                     MDString *PtrSourceMDNodeID,
                                     Sequence OldSeq,
                                     Sequence NewSeq) {
  MDNode *Node = 0;
  Value *tmp[3] = {PtrSourceMDNodeID,
                   SequenceToMDString(Inst->getContext(),
                                      OldSeq),
                   SequenceToMDString(Inst->getContext(),
                                      NewSeq)};
  Node = MDNode::get(Inst->getContext(),
                     ArrayRef<Value*>(tmp, 3));

  Inst->setMetadata(NodeId, Node);
}
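
// The resulting annotation has the shape described in the original design
// notes, e.g. (illustrative names):
//
//   !1 = metadata !{metadata !"(test,%x)", metadata !"S_Release",
//                   metadata !"S_Use"}
//
// i.e. a (function,variable) string followed by the old and new Sequence
// states, which keeps the format trivial for external tools to parse.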

/// Add to the beginning of the basic block llvm.ptr.annotations which show the
/// state of a pointer at the entrance to a basic block.
static void GenerateARCBBEntranceAnnotation(const char *Name, BasicBlock *BB,
                                            Value *Ptr, Sequence Seq) {
  // If we have a target identifier, make sure that we match it before
  // continuing.
  if(!ARCAnnotationTargetIdentifier.empty() &&
     !Ptr->getName().equals(ARCAnnotationTargetIdentifier))
    return;

  Module *M = BB->getParent()->getParent();
  LLVMContext &C = M->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);
  Type *Params[] = {I8XX, I8XX};
  FunctionType *FTy = FunctionType::get(Type::getVoidTy(C),
                                        ArrayRef<Type*>(Params, 2),
                                        /*isVarArg=*/false);
  Constant *Callee = M->getOrInsertFunction(Name, FTy);

  IRBuilder<> Builder(BB, BB->getFirstInsertionPt());

  Value *PtrName;
  StringRef Tmp = Ptr->getName();
  if (0 == (PtrName = M->getGlobalVariable(Tmp, true))) {
    Value *ActualPtrName = Builder.CreateGlobalStringPtr(Tmp,
                                                         Tmp + "_STR");
    PtrName = new GlobalVariable(*M, I8X, true, GlobalVariable::InternalLinkage,
                                 cast<Constant>(ActualPtrName), Tmp);
  }

  Value *S;
  std::string SeqStr = SequenceToString(Seq);
  if (0 == (S = M->getGlobalVariable(SeqStr, true))) {
    Value *ActualPtrName = Builder.CreateGlobalStringPtr(SeqStr,
                                                         SeqStr + "_STR");
    S = new GlobalVariable(*M, I8X, true, GlobalVariable::InternalLinkage,
                           cast<Constant>(ActualPtrName), SeqStr);
  }

  Builder.CreateCall2(Callee, PtrName, S);
}

/// Add to the end of the basic block llvm.ptr.annotations which show the state
/// of the pointer at the bottom of the basic block.
static void GenerateARCBBTerminatorAnnotation(const char *Name, BasicBlock *BB,
                                              Value *Ptr, Sequence Seq) {
  // If we have a target identifier, make sure that we match it before emitting
  // an annotation.
  if(!ARCAnnotationTargetIdentifier.empty() &&
     !Ptr->getName().equals(ARCAnnotationTargetIdentifier))
    return;

  Module *M = BB->getParent()->getParent();
  LLVMContext &C = M->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);
  Type *Params[] = {I8XX, I8XX};
  FunctionType *FTy = FunctionType::get(Type::getVoidTy(C),
                                        ArrayRef<Type*>(Params, 2),
                                        /*isVarArg=*/false);
  Constant *Callee = M->getOrInsertFunction(Name, FTy);

  IRBuilder<> Builder(BB, std::prev(BB->end()));

  Value *PtrName;
  StringRef Tmp = Ptr->getName();
  if (0 == (PtrName = M->getGlobalVariable(Tmp, true))) {
    Value *ActualPtrName = Builder.CreateGlobalStringPtr(Tmp,
                                                         Tmp + "_STR");
    PtrName = new GlobalVariable(*M, I8X, true, GlobalVariable::InternalLinkage,
                                 cast<Constant>(ActualPtrName), Tmp);
  }

  Value *S;
  std::string SeqStr = SequenceToString(Seq);
  if (0 == (S = M->getGlobalVariable(SeqStr, true))) {
    Value *ActualPtrName = Builder.CreateGlobalStringPtr(SeqStr,
                                                         SeqStr + "_STR");
    S = new GlobalVariable(*M, I8X, true, GlobalVariable::InternalLinkage,
                           cast<Constant>(ActualPtrName), SeqStr);
  }
  Builder.CreateCall2(Callee, PtrName, S);
}
|
|
|
|
|
/// Adds a source annotation to pointer and a state change annotation to Inst
/// referencing the source annotation and the old/new state of pointer.
static void GenerateARCAnnotation(unsigned InstMDId,
                                  unsigned PtrMDId,
                                  Instruction *Inst,
                                  Value *Ptr,
                                  Sequence OldSeq,
                                  Sequence NewSeq) {
  if (EnableARCAnnotations) {
    // If we have a target identifier, make sure that we match it before
    // emitting an annotation.
    if (!ARCAnnotationTargetIdentifier.empty() &&
        !Ptr->getName().equals(ARCAnnotationTargetIdentifier))
      return;

    // First generate the source annotation on our pointer. This will return an
    // MDString* if Ptr actually comes from an instruction implying we can put
    // in a source annotation. If AppendMDNodeToSourcePtr returns 0 (i.e. NULL),
    // then we know that our pointer is from an Argument so we put a reference
    // to the argument number.
    //
    // The point of this is to make it easy for the
    // llvm-arc-annotation-processor tool to cross reference where the source
    // pointer is in the LLVM IR since the LLVM IR parser does not submit such
    // information via debug info for backends to use (since why would anyone
    // need such a thing from LLVM IR besides in non-standard cases
    // [i.e. this]).
    MDString *SourcePtrMDNode =
      AppendMDNodeToSourcePtr(PtrMDId, Ptr);
    AppendMDNodeToInstForPtr(InstMDId, Inst, Ptr, SourcePtrMDNode, OldSeq,
                             NewSeq);
  }
}

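// As a hedged illustration (format taken from the original annotation design
// notes rather than a normative specification): a state change recorded by
// GenerateARCAnnotation shows up in the IR roughly as
//
//   !1 = metadata !{metadata !"(test,%x)", metadata !"S_Release",
//                   metadata !"S_Use"}
//
// where the first operand is a "(function,variable name)" tuple and the next
// two operands give the previous and new Sequence states of the pointer. The
// format is kept deliberately simple so external tools can parse it.
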
// The actual interface for accessing the above functionality is defined via
// some simple macros which are defined below. We do this so that the user does
// not need to pass in the metadata id that is needed, resulting in cleaner
// code, and because it provides an easy way to conditionally no-op all
// annotation support in a non-debug build.

/// Use this macro to annotate a sequence state change when processing
/// instructions bottom up.
#define ANNOTATE_BOTTOMUP(inst, ptr, old, new)                          \
  GenerateARCAnnotation(ARCAnnotationBottomUpMDKind,                    \
                        ARCAnnotationProvenanceSourceMDKind, (inst),    \
                        const_cast<Value*>(ptr), (old), (new))
/// Use this macro to annotate a sequence state change when processing
/// instructions top down.
#define ANNOTATE_TOPDOWN(inst, ptr, old, new)                           \
  GenerateARCAnnotation(ARCAnnotationTopDownMDKind,                     \
                        ARCAnnotationProvenanceSourceMDKind, (inst),    \
                        const_cast<Value*>(ptr), (old), (new))

#define ANNOTATE_BB(_states, _bb, _name, _type, _direction)                   \
  do {                                                                        \
    if (EnableARCAnnotations) {                                               \
      for (BBState::ptr_const_iterator I = (_states)._direction##_ptr_begin(),\
           E = (_states)._direction##_ptr_end(); I != E; ++I) {               \
        Value *Ptr = const_cast<Value*>(I->first);                            \
        Sequence Seq = I->second.GetSeq();                                    \
        GenerateARCBB ## _type ## Annotation(_name, (_bb), Ptr, Seq);         \
      }                                                                       \
    }                                                                         \
  } while (0)

#define ANNOTATE_BOTTOMUP_BBSTART(_states, _basicblock)                       \
  ANNOTATE_BB(_states, _basicblock, "llvm.arc.annotation.bottomup.bbstart",   \
              Entrance, bottom_up)
#define ANNOTATE_BOTTOMUP_BBEND(_states, _basicblock)                         \
  ANNOTATE_BB(_states, _basicblock, "llvm.arc.annotation.bottomup.bbend",     \
              Terminator, bottom_up)
#define ANNOTATE_TOPDOWN_BBSTART(_states, _basicblock)                        \
  ANNOTATE_BB(_states, _basicblock, "llvm.arc.annotation.topdown.bbstart",    \
              Entrance, top_down)
#define ANNOTATE_TOPDOWN_BBEND(_states, _basicblock)                          \
  ANNOTATE_BB(_states, _basicblock, "llvm.arc.annotation.topdown.bbend",      \
              Terminator, top_down)

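// Usage sketch (the first line mirrors an actual call site in the bottom-up
// visitor later in this file; the second is illustrative of the per-block
// variants and not a specific call site):
//
//   ANNOTATE_BOTTOMUP(Inst, Arg, S.GetSeq(), NewSeq);  // per-instruction
//   ANNOTATE_BOTTOMUP_BBEND(MyStates, BB);             // per-block summary
//
// All of these expand to no-ops unless annotation support is compiled in and
// -enable-objc-arc-annotations is passed to opt.
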
#else // !ARC_ANNOTATIONS

// If annotations are off, noop.
#define ANNOTATE_BOTTOMUP(inst, ptr, old, new)
#define ANNOTATE_TOPDOWN(inst, ptr, old, new)
#define ANNOTATE_BOTTOMUP_BBSTART(states, basicblock)
#define ANNOTATE_BOTTOMUP_BBEND(states, basicblock)
#define ANNOTATE_TOPDOWN_BBSTART(states, basicblock)
#define ANNOTATE_TOPDOWN_BBEND(states, basicblock)

#endif // !ARC_ANNOTATIONS

namespace {
  /// \brief The main ARC optimization pass.
  class ObjCARCOpt : public FunctionPass {
    bool Changed;
    ProvenanceAnalysis PA;
    ARCRuntimeEntryPoints EP;

    // This is used to track if a pointer is stored into an alloca.
    DenseSet<const Value *> MultiOwnersSet;

    /// A flag indicating whether this optimization pass should run.
    bool Run;

    /// Flags which determine whether each of the interesting runtime functions
    /// is in fact used in the current function.
    unsigned UsedInThisFunction;

    /// The Metadata Kind for clang.imprecise_release metadata.
    unsigned ImpreciseReleaseMDKind;

    /// The Metadata Kind for clang.arc.copy_on_escape metadata.
    unsigned CopyOnEscapeMDKind;

    /// The Metadata Kind for clang.arc.no_objc_arc_exceptions metadata.
    unsigned NoObjCARCExceptionsMDKind;

#ifdef ARC_ANNOTATIONS
    /// The Metadata Kind for llvm.arc.annotation.bottomup metadata.
    unsigned ARCAnnotationBottomUpMDKind;
    /// The Metadata Kind for llvm.arc.annotation.topdown metadata.
    unsigned ARCAnnotationTopDownMDKind;
    /// The Metadata Kind for llvm.arc.annotation.provenancesource metadata.
    unsigned ARCAnnotationProvenanceSourceMDKind;
#endif // ARC_ANNOTATIONS

    bool OptimizeRetainRVCall(Function &F, Instruction *RetainRV);
    void OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV,
                                   InstructionClass &Class);
    void OptimizeIndividualCalls(Function &F);

    void CheckForCFGHazards(const BasicBlock *BB,
                            DenseMap<const BasicBlock *, BBState> &BBStates,
                            BBState &MyStates) const;
    bool VisitInstructionBottomUp(Instruction *Inst,
                                  BasicBlock *BB,
                                  MapVector<Value *, RRInfo> &Retains,
                                  BBState &MyStates);
    bool VisitBottomUp(BasicBlock *BB,
                       DenseMap<const BasicBlock *, BBState> &BBStates,
                       MapVector<Value *, RRInfo> &Retains);
    bool VisitInstructionTopDown(Instruction *Inst,
                                 DenseMap<Value *, RRInfo> &Releases,
                                 BBState &MyStates);
    bool VisitTopDown(BasicBlock *BB,
                      DenseMap<const BasicBlock *, BBState> &BBStates,
                      DenseMap<Value *, RRInfo> &Releases);
    bool Visit(Function &F,
               DenseMap<const BasicBlock *, BBState> &BBStates,
               MapVector<Value *, RRInfo> &Retains,
               DenseMap<Value *, RRInfo> &Releases);

    void MoveCalls(Value *Arg, RRInfo &RetainsToMove, RRInfo &ReleasesToMove,
                   MapVector<Value *, RRInfo> &Retains,
                   DenseMap<Value *, RRInfo> &Releases,
                   SmallVectorImpl<Instruction *> &DeadInsts,
                   Module *M);

    bool ConnectTDBUTraversals(DenseMap<const BasicBlock *, BBState> &BBStates,
                               MapVector<Value *, RRInfo> &Retains,
                               DenseMap<Value *, RRInfo> &Releases,
                               Module *M,
                               SmallVectorImpl<Instruction *> &NewRetains,
                               SmallVectorImpl<Instruction *> &NewReleases,
                               SmallVectorImpl<Instruction *> &DeadInsts,
                               RRInfo &RetainsToMove,
                               RRInfo &ReleasesToMove,
                               Value *Arg,
                               bool KnownSafe,
                               bool &AnyPairsCompletelyEliminated);

    bool PerformCodePlacement(DenseMap<const BasicBlock *, BBState> &BBStates,
                              MapVector<Value *, RRInfo> &Retains,
                              DenseMap<Value *, RRInfo> &Releases,
                              Module *M);

    void OptimizeWeakCalls(Function &F);

    bool OptimizeSequences(Function &F);

    void OptimizeReturns(Function &F);

#ifndef NDEBUG
    void GatherStatistics(Function &F, bool AfterOptimization = false);
#endif

    void getAnalysisUsage(AnalysisUsage &AU) const override;
    bool doInitialization(Module &M) override;
    bool runOnFunction(Function &F) override;
    void releaseMemory() override;

  public:
    static char ID;
    ObjCARCOpt() : FunctionPass(ID) {
      initializeObjCARCOptPass(*PassRegistry::getPassRegistry());
    }
  };
}

char ObjCARCOpt::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCOpt,
                      "objc-arc", "ObjC ARC optimization", false, false)
INITIALIZE_PASS_DEPENDENCY(ObjCARCAliasAnalysis)
INITIALIZE_PASS_END(ObjCARCOpt,
                    "objc-arc", "ObjC ARC optimization", false, false)

Pass *llvm::createObjCARCOptPass() {
  return new ObjCARCOpt();
}

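// Hypothetical driver-side usage (not part of this pass; API spelling is
// approximate for this LLVM revision): the optimization is typically scheduled
// through the factory above, e.g.
//
//   PassManager PM;
//   PM.add(createObjCARCOptPass());
//   PM.run(*M);
//
// ObjCARCAliasAnalysis and AliasAnalysis are declared as required analyses in
// getAnalysisUsage below, so the pass manager constructs them automatically.
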
void ObjCARCOpt::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<ObjCARCAliasAnalysis>();
  AU.addRequired<AliasAnalysis>();
  // ARC optimization doesn't currently split critical edges.
  AU.setPreservesCFG();
}

/// Turn objc_retainAutoreleasedReturnValue into objc_retain if the operand is
/// not a return value. Or, if it can be paired with an
/// objc_autoreleaseReturnValue, delete the pair and return true.
bool
ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
  // Check for the argument being from an immediately preceding call or invoke.
  const Value *Arg = GetObjCArg(RetainRV);
  ImmutableCallSite CS(Arg);
  if (const Instruction *Call = CS.getInstruction()) {
    if (Call->getParent() == RetainRV->getParent()) {
      BasicBlock::const_iterator I = Call;
      ++I;
      while (IsNoopInstruction(I)) ++I;
      if (&*I == RetainRV)
        return false;
    } else if (const InvokeInst *II = dyn_cast<InvokeInst>(Call)) {
      BasicBlock *RetainRVParent = RetainRV->getParent();
      if (II->getNormalDest() == RetainRVParent) {
        BasicBlock::const_iterator I = RetainRVParent->begin();
        while (IsNoopInstruction(I)) ++I;
        if (&*I == RetainRV)
          return false;
      }
    }
  }

  // Check for being preceded by an objc_autoreleaseReturnValue on the same
  // pointer. In this case, we can delete the pair.
  BasicBlock::iterator I = RetainRV, Begin = RetainRV->getParent()->begin();
  if (I != Begin) {
    do --I; while (I != Begin && IsNoopInstruction(I));
    if (GetBasicInstructionClass(I) == IC_AutoreleaseRV &&
        GetObjCArg(I) == Arg) {
      Changed = true;
      ++NumPeeps;

      DEBUG(dbgs() << "Erasing autoreleaseRV,retainRV pair: " << *I << "\n"
                   << "Erasing " << *RetainRV << "\n");

      EraseInstruction(I);
      EraseInstruction(RetainRV);
      return true;
    }
  }

  // Turn it to a plain objc_retain.
  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "Transforming objc_retainAutoreleasedReturnValue => "
                  "objc_retain since the operand is not a return value.\n"
                  "Old = " << *RetainRV << "\n");

  Constant *NewDecl = EP.get(ARCRuntimeEntryPoints::EPT_Retain);
  cast<CallInst>(RetainRV)->setCalledFunction(NewDecl);

  DEBUG(dbgs() << "New = " << *RetainRV << "\n");

  return false;
}

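// For intuition, the pair-deletion case above corresponds roughly to IR of the
// form (hypothetical, simplified example):
//
//   %call = call i8* @foo()
//   %0 = call i8* @objc_autoreleaseReturnValue(i8* %call)
//   %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)
//
// where the autoreleaseRV/retainRV pair on the same pointer cancels out, so
// both calls can be erased.
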
/// Turn objc_autoreleaseReturnValue into objc_autorelease if the result is not
/// used as a return value.
void
ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV,
                                      InstructionClass &Class) {
  // Check for a return of the pointer value.
  const Value *Ptr = GetObjCArg(AutoreleaseRV);
  SmallVector<const Value *, 2> Users;
  Users.push_back(Ptr);
  do {
    Ptr = Users.pop_back_val();
    for (const User *U : Ptr->users()) {
      if (isa<ReturnInst>(U) || GetBasicInstructionClass(U) == IC_RetainRV)
        return;
      if (isa<BitCastInst>(U))
        Users.push_back(U);
    }
  } while (!Users.empty());

  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "Transforming objc_autoreleaseReturnValue => "
                  "objc_autorelease since its operand is not used as a return "
                  "value.\n"
                  "Old = " << *AutoreleaseRV << "\n");

  CallInst *AutoreleaseRVCI = cast<CallInst>(AutoreleaseRV);
  Constant *NewDecl = EP.get(ARCRuntimeEntryPoints::EPT_Autorelease);
  AutoreleaseRVCI->setCalledFunction(NewDecl);
  AutoreleaseRVCI->setTailCall(false); // Never tail call objc_autorelease.
  Class = IC_Autorelease;

  DEBUG(dbgs() << "New: " << *AutoreleaseRV << "\n");
}

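// Sketch of the transformation above on hypothetical IR: if %x is never
// returned (directly or through bitcasts) and never feeds a retainRV, then
//
//   %0 = tail call i8* @objc_autoreleaseReturnValue(i8* %x)
//
// becomes
//
//   %0 = call i8* @objc_autorelease(i8* %x)
//
// with the tail marker dropped, since objc_autorelease must never be tail
// called.
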
/// Visit each call, one at a time, and make simplifications without doing any
/// additional analysis.
void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
  DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeIndividualCalls ==\n");
  // Reset all the flags in preparation for recomputing them.
  UsedInThisFunction = 0;

  // Visit all objc_* calls in F.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    Instruction *Inst = &*I++;

    InstructionClass Class = GetBasicInstructionClass(Inst);

    DEBUG(dbgs() << "Visiting: Class: " << Class << "; " << *Inst << "\n");

    switch (Class) {
    default: break;

    // Delete no-op casts. These function calls have special semantics, but
    // the semantics are entirely implemented via lowering in the front-end,
    // so by the time they reach the optimizer, they are just no-op calls
    // which return their argument.
    //
    // There are gray areas here, as the ability to cast reference-counted
    // pointers to raw void* and back allows code to break ARC assumptions,
    // however these are currently considered to be unimportant.
    case IC_NoopCast:
      Changed = true;
      ++NumNoops;
      DEBUG(dbgs() << "Erasing no-op cast: " << *Inst << "\n");
      EraseInstruction(Inst);
      continue;

    // If the pointer-to-weak-pointer is null, it's undefined behavior.
    case IC_StoreWeak:
    case IC_LoadWeak:
    case IC_LoadWeakRetained:
    case IC_InitWeak:
    case IC_DestroyWeak: {
      CallInst *CI = cast<CallInst>(Inst);
      if (IsNullOrUndef(CI->getArgOperand(0))) {
        Changed = true;
        Type *Ty = CI->getArgOperand(0)->getType();
        new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
                      Constant::getNullValue(Ty),
                      CI);
        llvm::Value *NewValue = UndefValue::get(CI->getType());
        DEBUG(dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
                        "\nOld = " << *CI << "\nNew = " << *NewValue << "\n");
        CI->replaceAllUsesWith(NewValue);
        CI->eraseFromParent();
        continue;
      }
      break;
    }
    case IC_CopyWeak:
    case IC_MoveWeak: {
      CallInst *CI = cast<CallInst>(Inst);
      if (IsNullOrUndef(CI->getArgOperand(0)) ||
          IsNullOrUndef(CI->getArgOperand(1))) {
        Changed = true;
        Type *Ty = CI->getArgOperand(0)->getType();
        new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
                      Constant::getNullValue(Ty),
                      CI);

        llvm::Value *NewValue = UndefValue::get(CI->getType());
        DEBUG(dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
                        "\nOld = " << *CI << "\nNew = " << *NewValue << "\n");

        CI->replaceAllUsesWith(NewValue);
        CI->eraseFromParent();
        continue;
      }
      break;
    }
    case IC_RetainRV:
      if (OptimizeRetainRVCall(F, Inst))
        continue;
      break;
    case IC_AutoreleaseRV:
      OptimizeAutoreleaseRVCall(F, Inst, Class);
      break;
    }

    // objc_autorelease(x) -> objc_release(x) if x is otherwise unused.
    if (IsAutorelease(Class) && Inst->use_empty()) {
      CallInst *Call = cast<CallInst>(Inst);
      const Value *Arg = Call->getArgOperand(0);
      Arg = FindSingleUseIdentifiedObject(Arg);
      if (Arg) {
        Changed = true;
        ++NumAutoreleases;

        // Create the declaration lazily.
        LLVMContext &C = Inst->getContext();

        Constant *Decl = EP.get(ARCRuntimeEntryPoints::EPT_Release);
        CallInst *NewCall = CallInst::Create(Decl, Call->getArgOperand(0), "",
                                             Call);
        NewCall->setMetadata(ImpreciseReleaseMDKind, MDNode::get(C, None));

        DEBUG(dbgs() << "Replacing autorelease{,RV}(x) with objc_release(x) "
              "since x is otherwise unused.\nOld: " << *Call << "\nNew: "
              << *NewCall << "\n");

        EraseInstruction(Call);
        Inst = NewCall;
        Class = IC_Release;
      }
    }

    // For functions which can never be passed stack arguments, add
    // a tail keyword.
    if (IsAlwaysTail(Class)) {
      Changed = true;
      DEBUG(dbgs() << "Adding tail keyword to function since it can never be "
                      "passed stack args: " << *Inst << "\n");
      cast<CallInst>(Inst)->setTailCall();
    }

    // Ensure that functions that can never have a "tail" keyword due to the
    // semantics of ARC truly do not do so.
    if (IsNeverTail(Class)) {
      Changed = true;
      DEBUG(dbgs() << "Removing tail keyword from function: " << *Inst <<
            "\n");
      cast<CallInst>(Inst)->setTailCall(false);
    }

    // Set nounwind as needed.
    if (IsNoThrow(Class)) {
      Changed = true;
      DEBUG(dbgs() << "Found no throw class. Setting nounwind on: " << *Inst
            << "\n");
      cast<CallInst>(Inst)->setDoesNotThrow();
    }

    if (!IsNoopOnNull(Class)) {
      UsedInThisFunction |= 1 << Class;
      continue;
    }

    const Value *Arg = GetObjCArg(Inst);

    // ARC calls with null are no-ops. Delete them.
    if (IsNullOrUndef(Arg)) {
      Changed = true;
      ++NumNoops;
      DEBUG(dbgs() << "ARC calls with null are no-ops. Erasing: " << *Inst
            << "\n");
      EraseInstruction(Inst);
      continue;
    }

    // Keep track of which of retain, release, autorelease, and retain_block
    // are actually present in this function.
    UsedInThisFunction |= 1 << Class;

    // If Arg is a PHI, and one or more incoming values to the
    // PHI are null, and the call is control-equivalent to the PHI, and there
    // are no relevant side effects between the PHI and the call, the call
    // could be pushed up to just those paths with non-null incoming values.
    // For now, don't bother splitting critical edges for this.
    SmallVector<std::pair<Instruction *, const Value *>, 4> Worklist;
    Worklist.push_back(std::make_pair(Inst, Arg));
    do {
      std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
      Inst = Pair.first;
      Arg = Pair.second;

      const PHINode *PN = dyn_cast<PHINode>(Arg);
      if (!PN) continue;

      // Determine if the PHI has any null operands, or any incoming
      // critical edges.
      bool HasNull = false;
      bool HasCriticalEdges = false;
      for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
        Value *Incoming =
          StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
        if (IsNullOrUndef(Incoming))
          HasNull = true;
        else if (cast<TerminatorInst>(PN->getIncomingBlock(i)->back())
                   .getNumSuccessors() != 1) {
          HasCriticalEdges = true;
          break;
        }
      }
      // If we have null operands and no critical edges, optimize.
      if (!HasCriticalEdges && HasNull) {
        SmallPtrSet<Instruction *, 4> DependingInstructions;
        SmallPtrSet<const BasicBlock *, 4> Visited;

        // Check that there is nothing that cares about the reference
        // count between the call and the phi.
        switch (Class) {
        case IC_Retain:
        case IC_RetainBlock:
          // These can always be moved up.
          break;
        case IC_Release:
          // These can't be moved across things that care about the retain
          // count.
          FindDependencies(NeedsPositiveRetainCount, Arg,
                           Inst->getParent(), Inst,
                           DependingInstructions, Visited, PA);
          break;
        case IC_Autorelease:
          // These can't be moved across autorelease pool scope boundaries.
          FindDependencies(AutoreleasePoolBoundary, Arg,
                           Inst->getParent(), Inst,
                           DependingInstructions, Visited, PA);
          break;
        case IC_RetainRV:
        case IC_AutoreleaseRV:
          // Don't move these; the RV optimization depends on the autoreleaseRV
          // being tail called, and the retainRV being immediately after a call
          // (which might still happen if we get lucky with codegen layout, but
          // it's not worth taking the chance).
          continue;
        default:
          llvm_unreachable("Invalid dependence flavor");
        }

        if (DependingInstructions.size() == 1 &&
            *DependingInstructions.begin() == PN) {
          Changed = true;
          ++NumPartialNoops;
          // Clone the call into each predecessor that has a non-null value.
          CallInst *CInst = cast<CallInst>(Inst);
          Type *ParamTy = CInst->getArgOperand(0)->getType();
          for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
            Value *Incoming =
              StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
            if (!IsNullOrUndef(Incoming)) {
              CallInst *Clone = cast<CallInst>(CInst->clone());
              Value *Op = PN->getIncomingValue(i);
              Instruction *InsertPos = &PN->getIncomingBlock(i)->back();
              if (Op->getType() != ParamTy)
                Op = new BitCastInst(Op, ParamTy, "", InsertPos);
              Clone->setArgOperand(0, Op);
              Clone->insertBefore(InsertPos);

              DEBUG(dbgs() << "Cloning "
                           << *CInst << "\n"
                              "And inserting clone at " << *InsertPos << "\n");
              Worklist.push_back(std::make_pair(Clone, Incoming));
            }
          }
          // Erase the original call.
          DEBUG(dbgs() << "Erasing: " << *CInst << "\n");
          EraseInstruction(CInst);
          continue;
        }
      }
    } while (!Worklist.empty());
  }
}

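// Illustrative (hypothetical) IR for the PHI case handled above: given
//
//   %p = phi i8* [ null, %bb1 ], [ %obj, %bb2 ]
//   %r = call i8* @objc_retain(i8* %p)
//
// with nothing between the phi and the call that cares about the reference
// count, the retain is cloned into %bb2 only (the non-null predecessor) and
// the original call is erased, since retaining null is a no-op.
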
/// If we have a top down pointer in the S_Use state, make sure that there are
/// no CFG hazards by checking the states of various bottom up pointers.
static void CheckForUseCFGHazard(const Sequence SuccSSeq,
                                 const bool SuccSRRIKnownSafe,
                                 PtrState &S,
                                 bool &SomeSuccHasSame,
                                 bool &AllSuccsHaveSame,
                                 bool &NotAllSeqEqualButKnownSafe,
                                 bool &ShouldContinue) {
  switch (SuccSSeq) {
  case S_CanRelease: {
    if (!S.IsKnownSafe() && !SuccSRRIKnownSafe) {
      S.ClearSequenceProgress();
      break;
    }
    S.SetCFGHazardAfflicted(true);
    ShouldContinue = true;
    break;
  }
  case S_Use:
    SomeSuccHasSame = true;
    break;
  case S_Stop:
  case S_Release:
  case S_MovableRelease:
    if (!S.IsKnownSafe() && !SuccSRRIKnownSafe)
      AllSuccsHaveSame = false;
    else
      NotAllSeqEqualButKnownSafe = true;
    break;
  case S_Retain:
    llvm_unreachable("bottom-up pointer in retain state!");
  case S_None:
    llvm_unreachable("This should have been handled earlier.");
  }
}

/// If we have a Top Down pointer in the S_CanRelease state, make sure that
/// there are no CFG hazards by checking the states of various bottom up
/// pointers.
static void CheckForCanReleaseCFGHazard(const Sequence SuccSSeq,
                                        const bool SuccSRRIKnownSafe,
                                        PtrState &S,
                                        bool &SomeSuccHasSame,
                                        bool &AllSuccsHaveSame,
                                        bool &NotAllSeqEqualButKnownSafe) {
  switch (SuccSSeq) {
  case S_CanRelease:
    SomeSuccHasSame = true;
    break;
  case S_Stop:
  case S_Release:
  case S_MovableRelease:
  case S_Use:
    if (!S.IsKnownSafe() && !SuccSRRIKnownSafe)
      AllSuccsHaveSame = false;
    else
      NotAllSeqEqualButKnownSafe = true;
    break;
  case S_Retain:
    llvm_unreachable("bottom-up pointer in retain state!");
  case S_None:
    llvm_unreachable("This should have been handled earlier.");
  }
}

/// Check for critical edges, loop boundaries, irreducible control flow, or
/// other CFG structures where moving code across the edge would result in it
/// being executed more.
void
ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,
                               DenseMap<const BasicBlock *, BBState> &BBStates,
                               BBState &MyStates) const {
  // If any top-down local-use or possible-dec has a succ which is earlier in
  // the sequence, forget it.
  for (BBState::ptr_iterator I = MyStates.top_down_ptr_begin(),
       E = MyStates.top_down_ptr_end(); I != E; ++I) {
    PtrState &S = I->second;
    const Sequence Seq = I->second.GetSeq();

    // We only care about S_Retain, S_CanRelease, and S_Use.
    if (Seq == S_None)
      continue;

    // Make sure that if extra top down states are added in the future, this
    // code is updated to handle them.
    assert((Seq == S_Retain || Seq == S_CanRelease || Seq == S_Use) &&
           "Unknown top down sequence state.");

    const Value *Arg = I->first;
    const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
    bool SomeSuccHasSame = false;
    bool AllSuccsHaveSame = true;
    bool NotAllSeqEqualButKnownSafe = false;

    succ_const_iterator SI(TI), SE(TI, false);

    for (; SI != SE; ++SI) {
      // If VisitBottomUp has pointer information for this successor, take
      // what we know about it.
      const DenseMap<const BasicBlock *, BBState>::iterator BBI =
        BBStates.find(*SI);
      assert(BBI != BBStates.end());
      const PtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
      const Sequence SuccSSeq = SuccS.GetSeq();

      // If, bottom up, the pointer is in an S_None state, clear the sequence
      // progress, since the sequence in the bottom up state finished,
      // suggesting a mismatch between retains and releases. This is true for
      // all three cases that we are handling here: S_Retain, S_Use, and
      // S_CanRelease.
      if (SuccSSeq == S_None) {
        S.ClearSequenceProgress();
        continue;
      }

      // If we have S_Use or S_CanRelease, perform our CFG hazard checks.
      const bool SuccSRRIKnownSafe = SuccS.IsKnownSafe();

      // *NOTE* We do not use Seq from above here since we are allowing for
      // S.GetSeq() to change while we are visiting basic blocks.
      switch (S.GetSeq()) {
      case S_Use: {
        bool ShouldContinue = false;
        CheckForUseCFGHazard(SuccSSeq, SuccSRRIKnownSafe, S, SomeSuccHasSame,
                             AllSuccsHaveSame, NotAllSeqEqualButKnownSafe,
                             ShouldContinue);
        if (ShouldContinue)
          continue;
        break;
      }
      case S_CanRelease: {
        CheckForCanReleaseCFGHazard(SuccSSeq, SuccSRRIKnownSafe, S,
                                    SomeSuccHasSame, AllSuccsHaveSame,
                                    NotAllSeqEqualButKnownSafe);
        break;
      }
      case S_Retain:
      case S_None:
      case S_Stop:
      case S_Release:
      case S_MovableRelease:
        break;
      }
    }

    // If the state at the other end of any of the successor edges
    // matches the current state, require all edges to match. This
    // guards against loops in the middle of a sequence.
    if (SomeSuccHasSame && !AllSuccsHaveSame) {
      S.ClearSequenceProgress();
    } else if (NotAllSeqEqualButKnownSafe) {
      // If we would have cleared the state foregoing the fact that we are
      // known safe, stop code motion. This is because whether or not it is
      // safe to remove RR pairs via KnownSafe is an orthogonal concept to
      // whether we are allowed to perform code motion.
      S.SetCFGHazardAfflicted(true);
    }
  }
}

bool
ObjCARCOpt::VisitInstructionBottomUp(Instruction *Inst,
                                     BasicBlock *BB,
                                     MapVector<Value *, RRInfo> &Retains,
                                     BBState &MyStates) {
  bool NestingDetected = false;
  InstructionClass Class = GetInstructionClass(Inst);
  const Value *Arg = 0;

  DEBUG(dbgs() << "Class: " << Class << "\n");

  switch (Class) {
  case IC_Release: {
    Arg = GetObjCArg(Inst);

    PtrState &S = MyStates.getPtrBottomUpState(Arg);

    // If we see two releases in a row on the same pointer, make a note, and
    // we'll circle back to revisit it after we've hopefully eliminated the
    // second release, which may allow us to eliminate the first release too.
    // Theoretically we could implement removal of nested retain+release
    // pairs by making PtrState hold a stack of states, but this is
    // simple and avoids adding overhead for the non-nested case.
    if (S.GetSeq() == S_Release || S.GetSeq() == S_MovableRelease) {
      DEBUG(dbgs() << "Found nested releases (i.e. a release pair)\n");
      NestingDetected = true;
    }

    MDNode *ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);
    Sequence NewSeq = ReleaseMetadata ? S_MovableRelease : S_Release;
    ANNOTATE_BOTTOMUP(Inst, Arg, S.GetSeq(), NewSeq);
    S.ResetSequenceProgress(NewSeq);
    S.SetReleaseMetadata(ReleaseMetadata);
    S.SetKnownSafe(S.HasKnownPositiveRefCount());
    S.SetTailCallRelease(cast<CallInst>(Inst)->isTailCall());
    S.InsertCall(Inst);
    S.SetKnownPositiveRefCount();
    break;
  }
  case IC_RetainBlock:
    // In OptimizeIndividualCalls, we have strength reduced all optimizable
    // objc_retainBlocks to objc_retains. Thus at this point any
    // objc_retainBlocks that we see are not optimizable.
    break;
  case IC_Retain:
  case IC_RetainRV: {
    Arg = GetObjCArg(Inst);

    PtrState &S = MyStates.getPtrBottomUpState(Arg);
    S.SetKnownPositiveRefCount();
[ObjCARC Annotations] Implemented ARC annotation metadata to expose the ARC data flow analysis state in the IR via metadata.
Previously the inner works of the data flow analysis in ObjCARCOpts was hard to
get out of the optimizer for analysis of bugs or testing. All of the current ARC
unit tests are based off of testing the effect of the data flow
analysis (i.e. what statements are removed or moved, etc.). This creates
weakness in the current unit testing regimem since we are not actually testing
what effects various instructions have on the modeled pointer state.
Additionally in order to analyze a bug in the optimizer, one would need to track
by hand what the optimizer was actually doing either through use of DEBUG
statements or through the usage of a debugger, both yielding large loses in
developer productivity.
This patch deals with these two issues by providing ARC annotation
metadata that annotates instructions with the state changes that they cause in
various pointers as well as provides metadata to annotate provenance sources.
Specifically, we introduce the following metadata types:
1. llvm.arc.annotation.bottomup.
2. llvm.arc.annotation.topdown.
3. llvm.arc.annotation.provenancesource.
llvm.arc.annotation.{bottomup,topdown}: These annotations describes a state
change in a pointer when we are visiting instructions bottomup/topdown
respectively. The output format for both is the same:
!1 = metadata !{metadata !"(test,%x)", metadata !"S_Release", metadata !"S_Use"}
The first element is a string tuple with the following format:
(function,variable name)
The second two elements of the metadata show the previous state of the
pointer (in this case S_Release) and the new state of the pointer (S_Use). We
write the metadata in such a manner to ensure that it is easy for outside tools
to parse. This is important since I am currently working on a tool for taking
this information and pretty printing it besides the IR and that can be used for
LIT style testing via the generation of an index.
llvm.arc.annotation.provenancesource: This metadata is used to annotate
instructions which act as provenance sources, i.e. ones that introduce a
new (from the optimizer's perspective) non-argument pointer to track. This
enables cross-referencing in between provenance sources and the state changes
that occur to them.
This is still a work in progress. Additionally I plan on committing
later today additions to the annotations that annotate at the top/bottom
of basic blocks the state of the various pointers being tracked.
*NOTE* The metadata support is conditionally compiled into libObjCARCOpts only
when we are producing a debug build of llvm/clang and even so are
disabled by default. To enable the annotation metadata, pass in
-enable-objc-arc-annotations to opt.
llvm-svn: 177951
2013-03-26 08:42:04 +08:00
|
|
|
Sequence OldSeq = S.GetSeq();
|
|
|
|
switch (OldSeq) {
|
2012-03-23 02:24:56 +08:00
|
|
|
case S_Stop:
|
|
|
|
case S_Release:
|
|
|
|
case S_MovableRelease:
|
|
|
|
case S_Use:
|
2013-04-06 06:54:28 +08:00
|
|
|
// If OldSeq is not S_Use or OldSeq is S_Use and we are tracking an
|
|
|
|
// imprecise release, clear our reverse insertion points.
|
2013-06-22 03:12:38 +08:00
|
|
|
if (OldSeq != S_Use || S.IsTrackingImpreciseReleases())
|
2013-06-22 03:44:27 +08:00
|
|
|
S.ClearReverseInsertPts();
|
2012-03-23 02:24:56 +08:00
|
|
|
// FALL THROUGH
|
|
|
|
case S_CanRelease:
|
|
|
|
// Don't do retain+release tracking for IC_RetainRV, because it's
|
|
|
|
// better to let it remain as the first instruction after a call.
|
2013-03-29 07:08:44 +08:00
|
|
|
if (Class != IC_RetainRV)
|
2013-06-22 03:44:30 +08:00
|
|
|
Retains[Inst] = S.GetRRInfo();
|
2012-03-23 02:24:56 +08:00
|
|
|
S.ClearSequenceProgress();
|
|
|
|
break;
|
|
|
|
case S_None:
|
|
|
|
break;
|
|
|
|
case S_Retain:
|
|
|
|
llvm_unreachable("bottom-up pointer in retain state!");
|
|
|
|
}
|
2013-04-06 07:46:45 +08:00
|
|
|
ANNOTATE_BOTTOMUP(Inst, Arg, OldSeq, S.GetSeq());
|
2013-04-06 06:54:32 +08:00
|
|
|
// A retain moving bottom up can be a use.
|
|
|
|
break;
|
2012-03-23 02:24:56 +08:00
|
|
|
}
|
|
|
|
case IC_AutoreleasepoolPop:
|
|
|
|
// Conservatively, clear MyStates for all known pointers.
|
|
|
|
MyStates.clearBottomUpPointers();
|
|
|
|
return NestingDetected;
|
|
|
|
case IC_AutoreleasepoolPush:
|
|
|
|
case IC_None:
|
|
|
|
// These are irrelevant.
|
|
|
|
return NestingDetected;
|
2013-05-14 07:49:42 +08:00
|
|
|
case IC_User:
|
|
|
|
// If we have a store into an alloca of a pointer we are tracking, the
|
|
|
|
// pointer has multiple owners implying that we must be more conservative.
|
|
|
|
//
|
|
|
|
// This comes up in the context of a pointer being ``KnownSafe''. In the
|
2014-01-25 01:20:08 +08:00
|
|
|
// presence of a block being initialized, the frontend will emit the
|
2013-05-14 07:49:42 +08:00
|
|
|
// objc_retain on the original pointer and the release on the pointer loaded
|
|
|
|
// from the alloca. The optimizer will through the provenance analysis
|
|
|
|
// realize that the two are related, but since we only require KnownSafe in
|
|
|
|
// one direction, will match the inner retain on the original pointer with
|
|
|
|
// the guard release on the original pointer. This is fixed by ensuring that
|
2014-01-25 01:20:08 +08:00
|
|
|
// in the presence of allocas we only unconditionally remove pointers if
|
2013-05-14 07:49:42 +08:00
|
|
|
// both our retain and our release are KnownSafe.
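    //
    // Rough illustration only (hypothetical IR and names, not from a real
    // test case):
    //
    //   call i8* @objc_retain(i8* %x)
    //   store i8* %x, i8** %slot          ; %slot is part of an alloca
    //   ...
    //   %y = load i8** %slot
    //   call void @objc_release(i8* %y)
    //
    // Provenance analysis can relate %x and %y, so without the MultiOwnersSet
    // bookkeeping below the pair could be removed even though only one side
    // is KnownSafe.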
    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      if (AreAnyUnderlyingObjectsAnAlloca(SI->getPointerOperand())) {
        BBState::ptr_iterator I = MyStates.findPtrBottomUpState(
          StripPointerCastsAndObjCCalls(SI->getValueOperand()));
        if (I != MyStates.bottom_up_ptr_end())
          MultiOwnersSet.insert(I->first);
      }
    }
    break;
  default:
    break;
  }

  // Consider any other possible effects of this instruction on each
  // pointer being tracked.
  for (BBState::ptr_iterator MI = MyStates.bottom_up_ptr_begin(),
       ME = MyStates.bottom_up_ptr_end(); MI != ME; ++MI) {
    const Value *Ptr = MI->first;
    if (Ptr == Arg)
      continue; // Handled above.
    PtrState &S = MI->second;
    Sequence Seq = S.GetSeq();

    // Check for possible releases.
    if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
      DEBUG(dbgs() << "CanAlterRefCount: Seq: " << Seq << "; " << *Ptr
            << "\n");
      S.ClearKnownPositiveRefCount();
      switch (Seq) {
      case S_Use:
        S.SetSeq(S_CanRelease);
        ANNOTATE_BOTTOMUP(Inst, Ptr, Seq, S.GetSeq());
        continue;
      case S_CanRelease:
      case S_Release:
      case S_MovableRelease:
      case S_Stop:
      case S_None:
        break;
      case S_Retain:
        llvm_unreachable("bottom-up pointer in retain state!");
      }
    }

    // Check for possible direct uses.
    switch (Seq) {
    case S_Release:
    case S_MovableRelease:
      if (CanUse(Inst, Ptr, PA, Class)) {
        DEBUG(dbgs() << "CanUse: Seq: " << Seq << "; " << *Ptr
              << "\n");
        assert(!S.HasReverseInsertPts());
        // If this is an invoke instruction, we're scanning it as part of
        // one of its successor blocks, since we can't insert code after it
        // in its own block, and we don't want to split critical edges.
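        // (Sketch of the intent: since we are scanning the invoke as part of
        // a successor block BB, the reverse insert point recorded here is
        // BB->getFirstInsertionPt(), i.e. a point in that successor, rather
        // than a point after the invoke in its own block.)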
        if (isa<InvokeInst>(Inst))
          S.InsertReverseInsertPt(BB->getFirstInsertionPt());
        else
          S.InsertReverseInsertPt(std::next(BasicBlock::iterator(Inst)));
        S.SetSeq(S_Use);
        ANNOTATE_BOTTOMUP(Inst, Ptr, Seq, S_Use);
      } else if (Seq == S_Release && IsUser(Class)) {
        DEBUG(dbgs() << "PreciseReleaseUse: Seq: " << Seq << "; " << *Ptr
              << "\n");
        // Non-movable releases depend on any possible objc pointer use.
        S.SetSeq(S_Stop);
        ANNOTATE_BOTTOMUP(Inst, Ptr, S_Release, S_Stop);
        assert(!S.HasReverseInsertPts());
        // As above; handle invoke specially.
        if (isa<InvokeInst>(Inst))
          S.InsertReverseInsertPt(BB->getFirstInsertionPt());
        else
          S.InsertReverseInsertPt(std::next(BasicBlock::iterator(Inst)));
      }
      break;
    case S_Stop:
      if (CanUse(Inst, Ptr, PA, Class)) {
        DEBUG(dbgs() << "PreciseStopUse: Seq: " << Seq << "; " << *Ptr
              << "\n");
        S.SetSeq(S_Use);
        ANNOTATE_BOTTOMUP(Inst, Ptr, Seq, S_Use);
      }
      break;
    case S_CanRelease:
    case S_Use:
    case S_None:
      break;
    case S_Retain:
      llvm_unreachable("bottom-up pointer in retain state!");
    }
  }

  return NestingDetected;
}

bool
ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
                          DenseMap<const BasicBlock *, BBState> &BBStates,
                          MapVector<Value *, RRInfo> &Retains) {

  DEBUG(dbgs() << "\n== ObjCARCOpt::VisitBottomUp ==\n");

  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // Merge the states from each successor to compute the initial state
  // for the current block.
  BBState::edge_iterator SI(MyStates.succ_begin()),
                         SE(MyStates.succ_end());
  if (SI != SE) {
    const BasicBlock *Succ = *SI;
    DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
    assert(I != BBStates.end());
    MyStates.InitFromSucc(I->second);
    ++SI;
    for (; SI != SE; ++SI) {
      Succ = *SI;
      I = BBStates.find(Succ);
      assert(I != BBStates.end());
      MyStates.MergeSucc(I->second);
    }
  }

  // If ARC Annotations are enabled, output the current state of pointers at
  // the bottom of the basic block.
  ANNOTATE_BOTTOMUP_BBEND(MyStates, BB);
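
  // Per the annotation design notes, the ANNOTATE_* macros are compiled in
  // only for debug builds and are disabled by default; passing
  // -enable-objc-arc-annotations to opt turns them on. When active they emit
  // metadata describing state transitions, e.g.:
  //
  //   !1 = metadata !{metadata !"(test,%x)", metadata !"S_Release",
  //                   metadata !"S_Use"}
  //
  // i.e. a "(function,variable)" tag followed by the old and new sequence
  // states.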

  // Visit all the instructions, bottom-up.
  for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {
    Instruction *Inst = std::prev(I);

    // Invoke instructions are visited as part of their successors (below).
    if (isa<InvokeInst>(Inst))
      continue;

    DEBUG(dbgs() << "Visiting " << *Inst << "\n");

    NestingDetected |= VisitInstructionBottomUp(Inst, BB, Retains, MyStates);
  }

  // If there's a predecessor with an invoke, visit the invoke as if it were
  // part of this block, since we can't insert code after an invoke in its own
  // block, and we don't want to split critical edges.
  for (BBState::edge_iterator PI(MyStates.pred_begin()),
       PE(MyStates.pred_end()); PI != PE; ++PI) {
    BasicBlock *Pred = *PI;
    if (InvokeInst *II = dyn_cast<InvokeInst>(&Pred->back()))
      NestingDetected |= VisitInstructionBottomUp(II, BB, Retains, MyStates);
  }

  // If ARC Annotations are enabled, output the current state of pointers at
  // the top of the basic block.
  ANNOTATE_BOTTOMUP_BBSTART(MyStates, BB);

  return NestingDetected;
}

bool
ObjCARCOpt::VisitInstructionTopDown(Instruction *Inst,
                                    DenseMap<Value *, RRInfo> &Releases,
                                    BBState &MyStates) {
  bool NestingDetected = false;
  InstructionClass Class = GetInstructionClass(Inst);
  const Value *Arg = 0;

  switch (Class) {
  case IC_RetainBlock:
    // In OptimizeIndividualCalls, we have strength reduced all optimizable
    // objc_retainBlocks to objc_retains. Thus at this point any
    // objc_retainBlocks that we see are not optimizable.
    break;
  case IC_Retain:
  case IC_RetainRV: {
    Arg = GetObjCArg(Inst);

    PtrState &S = MyStates.getPtrTopDownState(Arg);

    // Don't do retain+release tracking for IC_RetainRV, because it's
    // better to let it remain as the first instruction after a call.
    if (Class != IC_RetainRV) {
      // Check for the case where we see two retains in a row on the same
      // pointer. If so, make a note, and we'll circle back to revisit it
      // after we've hopefully eliminated the second retain, which may allow
      // us to eliminate the first retain too.
      // Theoretically we could implement removal of nested retain+release
      // pairs by making PtrState hold a stack of states, but this is
      // simple and avoids adding overhead for the non-nested case.
      if (S.GetSeq() == S_Retain)
        NestingDetected = true;

      ANNOTATE_TOPDOWN(Inst, Arg, S.GetSeq(), S_Retain);
      S.ResetSequenceProgress(S_Retain);
      S.SetKnownSafe(S.HasKnownPositiveRefCount());
      S.InsertCall(Inst);
    }

    S.SetKnownPositiveRefCount();

    // A retain can be a potential use; proceed to the generic checking
    // code below.
    break;
  }
  case IC_Release: {
    Arg = GetObjCArg(Inst);

    PtrState &S = MyStates.getPtrTopDownState(Arg);
    S.ClearKnownPositiveRefCount();

    Sequence OldSeq = S.GetSeq();

    MDNode *ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);

    switch (OldSeq) {
    case S_Retain:
    case S_CanRelease:
      if (OldSeq == S_Retain || ReleaseMetadata != 0)
        S.ClearReverseInsertPts();
      // FALL THROUGH
    case S_Use:
      S.SetReleaseMetadata(ReleaseMetadata);
      S.SetTailCallRelease(cast<CallInst>(Inst)->isTailCall());
      Releases[Inst] = S.GetRRInfo();
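      // (The RRInfo recorded here is the summary accumulated on the PtrState:
      // the matching calls recorded via InsertCall, any reverse insert
      // points, the imprecise-release metadata, and the tail-call and
      // KnownSafe flags set above.)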
      ANNOTATE_TOPDOWN(Inst, Arg, S.GetSeq(), S_None);
      S.ClearSequenceProgress();
      break;
    case S_None:
      break;
    case S_Stop:
    case S_Release:
    case S_MovableRelease:
      llvm_unreachable("top-down pointer in release state!");
    }
    break;
  }
  case IC_AutoreleasepoolPop:
    // Conservatively, clear MyStates for all known pointers.
    MyStates.clearTopDownPointers();
    return NestingDetected;
  case IC_AutoreleasepoolPush:
  case IC_None:
    // These are irrelevant.
    return NestingDetected;
  default:
    break;
  }

  // Consider any other possible effects of this instruction on each
  // pointer being tracked.
  for (BBState::ptr_iterator MI = MyStates.top_down_ptr_begin(),
       ME = MyStates.top_down_ptr_end(); MI != ME; ++MI) {
    const Value *Ptr = MI->first;
    if (Ptr == Arg)
      continue; // Handled above.
    PtrState &S = MI->second;
    Sequence Seq = S.GetSeq();

    // Check for possible releases.
    if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
      DEBUG(dbgs() << "CanAlterRefCount: Seq: " << Seq << "; " << *Ptr
            << "\n");
      S.ClearKnownPositiveRefCount();
      switch (Seq) {
      case S_Retain:
        S.SetSeq(S_CanRelease);
        ANNOTATE_TOPDOWN(Inst, Ptr, Seq, S_CanRelease);
        assert(!S.HasReverseInsertPts());
        S.InsertReverseInsertPt(Inst);

        // One call can't cause a transition both from S_Retain to
        // S_CanRelease and from S_CanRelease to S_Use. If we've made the
        // first transition, we're done.
        continue;
      case S_Use:
      case S_CanRelease:
      case S_None:
        break;
      case S_Stop:
      case S_Release:
      case S_MovableRelease:
        llvm_unreachable("top-down pointer in release state!");
      }
    }

    // Check for possible direct uses.
    switch (Seq) {
    case S_CanRelease:
      if (CanUse(Inst, Ptr, PA, Class)) {
        DEBUG(dbgs() << "CanUse: Seq: " << Seq << "; " << *Ptr
              << "\n");
        S.SetSeq(S_Use);
        ANNOTATE_TOPDOWN(Inst, Ptr, Seq, S_Use);
      }
      break;
    case S_Retain:
    case S_Use:
    case S_None:
      break;
    case S_Stop:
    case S_Release:
    case S_MovableRelease:
      llvm_unreachable("top-down pointer in release state!");
    }
  }

  return NestingDetected;
}

bool
ObjCARCOpt::VisitTopDown(BasicBlock *BB,
                         DenseMap<const BasicBlock *, BBState> &BBStates,
                         DenseMap<Value *, RRInfo> &Releases) {
  DEBUG(dbgs() << "\n== ObjCARCOpt::VisitTopDown ==\n");
  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // Merge the states from each predecessor to compute the initial state
  // for the current block.
  BBState::edge_iterator PI(MyStates.pred_begin()),
                         PE(MyStates.pred_end());
  if (PI != PE) {
    const BasicBlock *Pred = *PI;
    DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
    assert(I != BBStates.end());
    MyStates.InitFromPred(I->second);
    ++PI;
    for (; PI != PE; ++PI) {
      Pred = *PI;
      I = BBStates.find(Pred);
      assert(I != BBStates.end());
      MyStates.MergePred(I->second);
    }
  }

  // If ARC Annotations are enabled, output the current state of pointers at
  // the top of the basic block.
  ANNOTATE_TOPDOWN_BBSTART(MyStates, BB);

  // Visit all the instructions, top-down.
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
    Instruction *Inst = I;

    DEBUG(dbgs() << "Visiting " << *Inst << "\n");

    NestingDetected |= VisitInstructionTopDown(Inst, Releases, MyStates);
  }

  // If ARC Annotations are enabled, output the current state of pointers at
  // the bottom of the basic block.
  ANNOTATE_TOPDOWN_BBEND(MyStates, BB);

#ifdef ARC_ANNOTATIONS
  if (!(EnableARCAnnotations && DisableCheckForCFGHazards))
#endif
    CheckForCFGHazards(BB, BBStates, MyStates);

  return NestingDetected;
}

static void
ComputePostOrders(Function &F,
                  SmallVectorImpl<BasicBlock *> &PostOrder,
                  SmallVectorImpl<BasicBlock *> &ReverseCFGPostOrder,
                  unsigned NoObjCARCExceptionsMDKind,
                  DenseMap<const BasicBlock *, BBState> &BBStates) {
  /// The visited set, for doing DFS walks.
  SmallPtrSet<BasicBlock *, 16> Visited;

  // Do DFS, computing the PostOrder.
  SmallPtrSet<BasicBlock *, 16> OnStack;
  SmallVector<std::pair<BasicBlock *, succ_iterator>, 16> SuccStack;
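
  // For intuition (illustrative only): on a simple diamond CFG
  //
  //   entry -> then, else;  then -> merge;  else -> merge
  //
  // one possible PostOrder produced below is { merge, then, else, entry },
  // so iterating it in reverse yields a reverse-postorder visit order for
  // the top-down pass.
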
  // Functions always have exactly one entry block, and we don't have
  // any other block that we treat like an entry block.
  BasicBlock *EntryBB = &F.getEntryBlock();
  BBState &MyStates = BBStates[EntryBB];
  MyStates.SetAsEntry();
  TerminatorInst *EntryTI = cast<TerminatorInst>(&EntryBB->back());
  SuccStack.push_back(std::make_pair(EntryBB, succ_iterator(EntryTI)));
  Visited.insert(EntryBB);
  OnStack.insert(EntryBB);
  do {
  dfs_next_succ:
    BasicBlock *CurrBB = SuccStack.back().first;
    TerminatorInst *TI = cast<TerminatorInst>(&CurrBB->back());
    succ_iterator SE(TI, false);

    while (SuccStack.back().second != SE) {
      BasicBlock *SuccBB = *SuccStack.back().second++;
      if (Visited.insert(SuccBB)) {
        TerminatorInst *TI = cast<TerminatorInst>(&SuccBB->back());
        SuccStack.push_back(std::make_pair(SuccBB, succ_iterator(TI)));
        BBStates[CurrBB].addSucc(SuccBB);
        BBState &SuccStates = BBStates[SuccBB];
        SuccStates.addPred(CurrBB);
        OnStack.insert(SuccBB);
        goto dfs_next_succ;
      }

      if (!OnStack.count(SuccBB)) {
        BBStates[CurrBB].addSucc(SuccBB);
        BBStates[SuccBB].addPred(CurrBB);
      }
    }
    OnStack.erase(CurrBB);
    PostOrder.push_back(CurrBB);
    SuccStack.pop_back();
  } while (!SuccStack.empty());

  Visited.clear();

  // Do reverse-CFG DFS, computing the reverse-CFG PostOrder.
  // Functions may have many exits, and there are also blocks which we treat
  // as exits due to ignored edges.
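  // (Roughly speaking, a block counts as an exit here if the forward walk
  // above recorded no outgoing edges for it, e.g. a block ending in a return;
  // such blocks seed the backwards walk.)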
  SmallVector<std::pair<BasicBlock *, BBState::edge_iterator>, 16> PredStack;
  for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I) {
    BasicBlock *ExitBB = I;
    BBState &MyStates = BBStates[ExitBB];
    if (!MyStates.isExit())
      continue;

    MyStates.SetAsExit();

    PredStack.push_back(std::make_pair(ExitBB, MyStates.pred_begin()));
    Visited.insert(ExitBB);
    while (!PredStack.empty()) {
    reverse_dfs_next_succ:
      BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
      while (PredStack.back().second != PE) {
        BasicBlock *BB = *PredStack.back().second++;
        if (Visited.insert(BB)) {
          PredStack.push_back(std::make_pair(BB, BBStates[BB].pred_begin()));
          goto reverse_dfs_next_succ;
        }
      }
      ReverseCFGPostOrder.push_back(PredStack.pop_back_val().first);
    }
  }
}

// Visit the function both top-down and bottom-up.
bool
ObjCARCOpt::Visit(Function &F,
                  DenseMap<const BasicBlock *, BBState> &BBStates,
                  MapVector<Value *, RRInfo> &Retains,
                  DenseMap<Value *, RRInfo> &Releases) {

  // Use reverse-postorder traversals, because we magically know that loops
  // will be well behaved, i.e. they won't repeatedly call retain on a single
  // pointer without doing a release. We can't use the ReversePostOrderTraversal
  // class here because we want the reverse-CFG postorder to consider each
  // function exit point, and we want to ignore selected cycle edges.
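  // Concretely: ReverseCFGPostOrder, iterated in reverse below, gives a
  // reverse-postorder of the reversed CFG (so VisitBottomUp sees a block
  // only after, in most cases, its successors), while PostOrder in reverse
  // gives a regular reverse-postorder for VisitTopDown.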
|
|
|
|
SmallVector<BasicBlock *, 16> PostOrder;
|
|
|
|
SmallVector<BasicBlock *, 16> ReverseCFGPostOrder;
|
2012-04-25 06:53:18 +08:00
|
|
|
ComputePostOrders(F, PostOrder, ReverseCFGPostOrder,
|
|
|
|
NoObjCARCExceptionsMDKind,
|
|
|
|
BBStates);
|
2011-12-13 03:42:25 +08:00
|
|
|
|
|
|
|
// Use reverse-postorder on the reverse CFG for bottom-up.
|
2011-06-16 07:37:01 +08:00
|
|
|
bool BottomUpNestingDetected = false;
|
2011-08-19 05:27:42 +08:00
|
|
|
for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
|
2011-12-13 03:42:25 +08:00
|
|
|
ReverseCFGPostOrder.rbegin(), E = ReverseCFGPostOrder.rend();
|
|
|
|
I != E; ++I)
|
|
|
|
BottomUpNestingDetected |= VisitBottomUp(*I, BBStates, Retains);
|
2011-06-16 07:37:01 +08:00
|
|
|
|
2011-12-13 03:42:25 +08:00
|
|
|
// Use reverse-postorder for top-down.
|
2011-06-16 07:37:01 +08:00
|
|
|
bool TopDownNestingDetected = false;
|
2011-12-13 03:42:25 +08:00
|
|
|
for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
|
|
|
|
PostOrder.rbegin(), E = PostOrder.rend();
|
|
|
|
I != E; ++I)
|
|
|
|
TopDownNestingDetected |= VisitTopDown(*I, BBStates, Releases);
|
2011-06-16 07:37:01 +08:00
|
|
|
|
|
|
|
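  // Nesting only matters if it was observed by both traversals; the result is
  // used by OptimizeSequences to decide whether another iteration is
  // worthwhile.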
return TopDownNestingDetected && BottomUpNestingDetected;
|
|
|
|
}
|
|
|
|
|
2013-01-14 08:35:14 +08:00
|
|
|
/// Move the calls in RetainsToMove and ReleasesToMove.
|
2011-06-16 07:37:01 +08:00
|
|
|
void ObjCARCOpt::MoveCalls(Value *Arg,
|
|
|
|
RRInfo &RetainsToMove,
|
|
|
|
RRInfo &ReleasesToMove,
|
|
|
|
MapVector<Value *, RRInfo> &Retains,
|
|
|
|
DenseMap<Value *, RRInfo> &Releases,
|
2011-07-23 06:29:21 +08:00
|
|
|
SmallVectorImpl<Instruction *> &DeadInsts,
|
2013-04-06 07:46:45 +08:00
|
|
|
Module *M) {
|
2011-07-18 12:54:35 +08:00
|
|
|
Type *ArgTy = Arg->getType();
|
2011-07-23 06:29:21 +08:00
|
|
|
Type *ParamTy = PointerType::getUnqual(Type::getInt8Ty(ArgTy->getContext()));
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2013-04-06 02:10:41 +08:00
|
|
|
DEBUG(dbgs() << "== ObjCARCOpt::MoveCalls ==\n");
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
// Insert the new retain and release calls.
|
|
|
|
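  // Note the crossover: the new retains are placed at the insertion points
  // computed for the releases (ReleasesToMove.ReverseInsertPts), and the new
  // releases at the points computed for the retains.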
for (SmallPtrSet<Instruction *, 2>::const_iterator
|
|
|
|
PI = ReleasesToMove.ReverseInsertPts.begin(),
|
|
|
|
PE = ReleasesToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
|
|
|
|
Instruction *InsertPt = *PI;
|
|
|
|
Value *MyArg = ArgTy == ParamTy ? Arg :
|
|
|
|
new BitCastInst(Arg, ParamTy, "", InsertPt);
|
2013-07-06 09:39:23 +08:00
|
|
|
Constant *Decl = EP.get(ARCRuntimeEntryPoints::EPT_Retain);
|
|
|
|
CallInst *Call = CallInst::Create(Decl, MyArg, "", InsertPt);
|
2011-06-16 07:37:01 +08:00
|
|
|
Call->setDoesNotThrow();
|
2013-03-29 07:08:44 +08:00
|
|
|
Call->setTailCall();
|
2013-03-29 13:13:07 +08:00
|
|
|
|
2013-04-21 08:30:50 +08:00
|
|
|
DEBUG(dbgs() << "Inserting new Retain: " << *Call << "\n"
|
2013-04-06 02:10:41 +08:00
|
|
|
"At insertion point: " << *InsertPt << "\n");
|
2011-06-16 07:37:01 +08:00
|
|
|
}
|
|
|
|
for (SmallPtrSet<Instruction *, 2>::const_iterator
|
|
|
|
PI = RetainsToMove.ReverseInsertPts.begin(),
|
|
|
|
PE = RetainsToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
|
2012-03-24 01:47:54 +08:00
|
|
|
Instruction *InsertPt = *PI;
|
|
|
|
Value *MyArg = ArgTy == ParamTy ? Arg :
|
|
|
|
new BitCastInst(Arg, ParamTy, "", InsertPt);
|
2013-07-06 09:39:23 +08:00
|
|
|
Constant *Decl = EP.get(ARCRuntimeEntryPoints::EPT_Release);
|
|
|
|
CallInst *Call = CallInst::Create(Decl, MyArg, "", InsertPt);
|
2012-03-24 01:47:54 +08:00
|
|
|
// Attach a clang.imprecise_release metadata tag, if appropriate.
|
|
|
|
if (MDNode *M = ReleasesToMove.ReleaseMetadata)
|
|
|
|
Call->setMetadata(ImpreciseReleaseMDKind, M);
|
|
|
|
Call->setDoesNotThrow();
|
|
|
|
if (ReleasesToMove.IsTailCallRelease)
|
|
|
|
Call->setTailCall();
|
2013-01-10 03:23:24 +08:00
|
|
|
|
2013-04-06 02:10:41 +08:00
|
|
|
DEBUG(dbgs() << "Inserting new Release: " << *Call << "\n"
|
|
|
|
"At insertion point: " << *InsertPt << "\n");
|
2011-06-16 07:37:01 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
// Delete the original retain and release calls.
|
|
|
|
for (SmallPtrSet<Instruction *, 2>::const_iterator
|
|
|
|
AI = RetainsToMove.Calls.begin(),
|
|
|
|
AE = RetainsToMove.Calls.end(); AI != AE; ++AI) {
|
|
|
|
Instruction *OrigRetain = *AI;
|
|
|
|
Retains.blot(OrigRetain);
|
|
|
|
DeadInsts.push_back(OrigRetain);
|
2013-04-06 02:10:41 +08:00
|
|
|
DEBUG(dbgs() << "Deleting retain: " << *OrigRetain << "\n");
|
2011-06-16 07:37:01 +08:00
|
|
|
}
|
|
|
|
for (SmallPtrSet<Instruction *, 2>::const_iterator
|
|
|
|
AI = ReleasesToMove.Calls.begin(),
|
|
|
|
AE = ReleasesToMove.Calls.end(); AI != AE; ++AI) {
|
|
|
|
Instruction *OrigRelease = *AI;
|
|
|
|
Releases.erase(OrigRelease);
|
|
|
|
DeadInsts.push_back(OrigRelease);
|
2013-04-06 02:10:41 +08:00
|
|
|
DEBUG(dbgs() << "Deleting release: " << *OrigRelease << "\n");
|
2011-06-16 07:37:01 +08:00
|
|
|
}
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
}
|
|
|
|
|
2013-01-23 05:49:00 +08:00
|
|
|
bool
|
|
|
|
ObjCARCOpt::ConnectTDBUTraversals(DenseMap<const BasicBlock *, BBState>
|
|
|
|
&BBStates,
|
|
|
|
MapVector<Value *, RRInfo> &Retains,
|
|
|
|
DenseMap<Value *, RRInfo> &Releases,
|
|
|
|
Module *M,
|
2013-07-14 12:42:23 +08:00
|
|
|
SmallVectorImpl<Instruction *> &NewRetains,
|
|
|
|
SmallVectorImpl<Instruction *> &NewReleases,
|
|
|
|
SmallVectorImpl<Instruction *> &DeadInsts,
|
2013-01-23 05:49:00 +08:00
|
|
|
RRInfo &RetainsToMove,
|
|
|
|
RRInfo &ReleasesToMove,
|
|
|
|
Value *Arg,
|
|
|
|
bool KnownSafe,
|
|
|
|
bool &AnyPairsCompletelyEliminated) {
|
|
|
|
// If a pair happens in a region where it is known that the reference count
|
2013-05-14 07:49:42 +08:00
|
|
|
// is already incremented, we can similarly ignore possible decrements unless
|
|
|
|
// we are dealing with a retainable object with multiple provenance sources.
|
2013-01-23 05:49:00 +08:00
|
|
|
bool KnownSafeTD = true, KnownSafeBU = true;
|
2013-05-14 07:49:42 +08:00
|
|
|
bool MultipleOwners = false;
|
2013-05-25 04:44:05 +08:00
|
|
|
bool CFGHazardAfflicted = false;
|
2013-01-23 05:49:00 +08:00
|
|
|
|
|
|
|
// Connect the dots between the top-down-collected RetainsToMove and
|
|
|
|
// bottom-up-collected ReleasesToMove to form sets of related calls.
|
|
|
|
// This is an iterative process so that we connect multiple releases
|
|
|
|
// to multiple retains if needed.
|
|
|
|
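  // OldDelta/OldCount are accumulated from the path counts of the original
  // call sites, NewDelta/NewCount from the proposed insertion points. A
  // nonzero delta below means the rewrite would unbalance retains and
  // releases along some path, so the transformation is abandoned.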
unsigned OldDelta = 0;
|
|
|
|
unsigned NewDelta = 0;
|
|
|
|
unsigned OldCount = 0;
|
|
|
|
unsigned NewCount = 0;
|
|
|
|
bool FirstRelease = true;
|
|
|
|
for (;;) {
|
|
|
|
for (SmallVectorImpl<Instruction *>::const_iterator
|
|
|
|
NI = NewRetains.begin(), NE = NewRetains.end(); NI != NE; ++NI) {
|
|
|
|
Instruction *NewRetain = *NI;
|
|
|
|
MapVector<Value *, RRInfo>::const_iterator It = Retains.find(NewRetain);
|
|
|
|
assert(It != Retains.end());
|
|
|
|
const RRInfo &NewRetainRRI = It->second;
|
|
|
|
KnownSafeTD &= NewRetainRRI.KnownSafe;
|
2013-05-25 04:44:02 +08:00
|
|
|
MultipleOwners =
|
|
|
|
MultipleOwners || MultiOwnersSet.count(GetObjCArg(NewRetain));
|
2013-01-23 05:49:00 +08:00
|
|
|
for (SmallPtrSet<Instruction *, 2>::const_iterator
|
|
|
|
LI = NewRetainRRI.Calls.begin(),
|
|
|
|
LE = NewRetainRRI.Calls.end(); LI != LE; ++LI) {
|
|
|
|
Instruction *NewRetainRelease = *LI;
|
|
|
|
DenseMap<Value *, RRInfo>::const_iterator Jt =
|
|
|
|
Releases.find(NewRetainRelease);
|
|
|
|
if (Jt == Releases.end())
|
|
|
|
return false;
|
|
|
|
const RRInfo &NewRetainReleaseRRI = Jt->second;
|
2013-11-06 00:02:40 +08:00
|
|
|
|
|
|
|
// If the release does not have a reference to the retain as well,
|
|
|
|
// something happened which is unaccounted for. Do not do anything.
|
|
|
|
//
|
|
|
|
// This can happen if we catch an additive overflow during path count
|
|
|
|
// merging.
|
|
|
|
if (!NewRetainReleaseRRI.Calls.count(NewRetain))
|
|
|
|
return false;
|
|
|
|
|
2013-01-23 05:49:00 +08:00
|
|
|
if (ReleasesToMove.Calls.insert(NewRetainRelease)) {
|
2013-06-07 14:16:49 +08:00
|
|
|
|
|
|
|
// If we overflow when we compute the path count, don't remove/move
|
|
|
|
// anything.
|
|
|
|
const BBState &NRRBBState = BBStates[NewRetainRelease->getParent()];
|
2013-08-10 07:22:27 +08:00
|
|
|
unsigned PathCount = BBState::OverflowOccurredValue;
|
2013-06-07 14:16:49 +08:00
|
|
|
if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
|
|
|
|
return false;
|
2013-08-10 07:22:27 +08:00
|
|
|
assert(PathCount != BBState::OverflowOccurredValue &&
|
|
|
|
"PathCount at this point can not be "
|
|
|
|
"OverflowOccurredValue.");
|
2013-06-07 14:16:49 +08:00
|
|
|
OldDelta -= PathCount;
|
2013-01-23 05:49:00 +08:00
|
|
|
|
|
|
|
// Merge the ReleaseMetadata and IsTailCallRelease values.
|
|
|
|
if (FirstRelease) {
|
|
|
|
ReleasesToMove.ReleaseMetadata =
|
|
|
|
NewRetainReleaseRRI.ReleaseMetadata;
|
|
|
|
ReleasesToMove.IsTailCallRelease =
|
|
|
|
NewRetainReleaseRRI.IsTailCallRelease;
|
|
|
|
FirstRelease = false;
|
|
|
|
} else {
|
|
|
|
if (ReleasesToMove.ReleaseMetadata !=
|
|
|
|
NewRetainReleaseRRI.ReleaseMetadata)
|
|
|
|
ReleasesToMove.ReleaseMetadata = 0;
|
|
|
|
if (ReleasesToMove.IsTailCallRelease !=
|
|
|
|
NewRetainReleaseRRI.IsTailCallRelease)
|
|
|
|
ReleasesToMove.IsTailCallRelease = false;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Collect the optimal insertion points.
|
|
|
|
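          // (Skipped when KnownSafe: such pairs are deleted outright, so no
          // new calls need to be placed.)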
if (!KnownSafe)
|
|
|
|
for (SmallPtrSet<Instruction *, 2>::const_iterator
|
|
|
|
RI = NewRetainReleaseRRI.ReverseInsertPts.begin(),
|
|
|
|
RE = NewRetainReleaseRRI.ReverseInsertPts.end();
|
|
|
|
RI != RE; ++RI) {
|
|
|
|
Instruction *RIP = *RI;
|
2013-06-07 14:16:49 +08:00
|
|
|
if (ReleasesToMove.ReverseInsertPts.insert(RIP)) {
|
|
|
|
// If we overflow when we compute the path count, don't
|
|
|
|
// remove/move anything.
|
|
|
|
const BBState &RIPBBState = BBStates[RIP->getParent()];
|
2013-08-10 07:22:27 +08:00
|
|
|
PathCount = BBState::OverflowOccurredValue;
|
2013-06-07 14:16:49 +08:00
|
|
|
if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
|
|
|
|
return false;
|
2013-08-10 07:22:27 +08:00
|
|
|
assert(PathCount != BBState::OverflowOccurredValue &&
|
|
|
|
"PathCount at this point can not be "
|
|
|
|
"OverflowOccurredValue.");
|
2013-06-07 14:16:49 +08:00
|
|
|
NewDelta -= PathCount;
|
|
|
|
}
|
2013-01-23 05:49:00 +08:00
|
|
|
}
|
|
|
|
NewReleases.push_back(NewRetainRelease);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
NewRetains.clear();
|
|
|
|
if (NewReleases.empty()) break;
|
|
|
|
|
|
|
|
// Back the other way.
|
|
|
|
for (SmallVectorImpl<Instruction *>::const_iterator
|
|
|
|
NI = NewReleases.begin(), NE = NewReleases.end(); NI != NE; ++NI) {
|
|
|
|
Instruction *NewRelease = *NI;
|
|
|
|
DenseMap<Value *, RRInfo>::const_iterator It =
|
|
|
|
Releases.find(NewRelease);
|
|
|
|
assert(It != Releases.end());
|
|
|
|
const RRInfo &NewReleaseRRI = It->second;
|
|
|
|
KnownSafeBU &= NewReleaseRRI.KnownSafe;
|
2013-05-25 04:44:05 +08:00
|
|
|
CFGHazardAfflicted |= NewReleaseRRI.CFGHazardAfflicted;
|
2013-01-23 05:49:00 +08:00
|
|
|
for (SmallPtrSet<Instruction *, 2>::const_iterator
|
|
|
|
LI = NewReleaseRRI.Calls.begin(),
|
|
|
|
LE = NewReleaseRRI.Calls.end(); LI != LE; ++LI) {
|
|
|
|
Instruction *NewReleaseRetain = *LI;
|
|
|
|
MapVector<Value *, RRInfo>::const_iterator Jt =
|
|
|
|
Retains.find(NewReleaseRetain);
|
|
|
|
if (Jt == Retains.end())
|
|
|
|
return false;
|
|
|
|
const RRInfo &NewReleaseRetainRRI = Jt->second;
|
2013-06-07 14:16:49 +08:00
|
|
|
|
2013-11-06 00:02:40 +08:00
|
|
|
// If the retain does not have a reference to the release as well,
|
|
|
|
// something happened which is unaccounted for. Do not do anything.
|
|
|
|
//
|
|
|
|
// This can happen if we catch an additive overflow during path count
|
|
|
|
// merging.
|
|
|
|
if (!NewReleaseRetainRRI.Calls.count(NewRelease))
|
|
|
|
return false;
|
|
|
|
|
|
|
|
if (RetainsToMove.Calls.insert(NewReleaseRetain)) {
|
2013-06-07 14:16:49 +08:00
|
|
|
// If we overflow when we compute the path count, don't remove/move
|
|
|
|
// anything.
|
|
|
|
const BBState &NRRBBState = BBStates[NewReleaseRetain->getParent()];
|
2013-08-10 07:22:27 +08:00
|
|
|
unsigned PathCount = BBState::OverflowOccurredValue;
|
2013-06-07 14:16:49 +08:00
|
|
|
if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
|
|
|
|
return false;
|
2013-08-10 07:22:27 +08:00
|
|
|
assert(PathCount != BBState::OverflowOccurredValue &&
|
|
|
|
"PathCount at this point can not be "
|
|
|
|
"OverflowOccurredValue.");
|
2013-01-23 05:49:00 +08:00
|
|
|
OldDelta += PathCount;
|
|
|
|
OldCount += PathCount;
|
|
|
|
|
|
|
|
// Collect the optimal insertion points.
|
|
|
|
if (!KnownSafe)
|
|
|
|
for (SmallPtrSet<Instruction *, 2>::const_iterator
|
|
|
|
RI = NewReleaseRetainRRI.ReverseInsertPts.begin(),
|
|
|
|
RE = NewReleaseRetainRRI.ReverseInsertPts.end();
|
|
|
|
RI != RE; ++RI) {
|
|
|
|
Instruction *RIP = *RI;
|
|
|
|
if (RetainsToMove.ReverseInsertPts.insert(RIP)) {
|
2013-06-07 14:16:49 +08:00
|
|
|
// If we overflow when we compute the path count, don't
|
|
|
|
// remove/move anything.
|
|
|
|
const BBState &RIPBBState = BBStates[RIP->getParent()];
|
2013-08-10 07:22:27 +08:00
|
|
|
|
|
|
|
PathCount = BBState::OverflowOccurredValue;
|
2013-06-07 14:16:49 +08:00
|
|
|
if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
|
|
|
|
return false;
|
2013-08-10 07:22:27 +08:00
|
|
|
assert(PathCount != BBState::OverflowOccurredValue &&
|
|
|
|
"PathCount at this point can not be "
|
|
|
|
"OverflowOccurredValue.");
|
2013-01-23 05:49:00 +08:00
|
|
|
NewDelta += PathCount;
|
|
|
|
NewCount += PathCount;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
NewRetains.push_back(NewReleaseRetain);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
NewReleases.clear();
|
|
|
|
if (NewRetains.empty()) break;
|
|
|
|
}
|
|
|
|
|
2013-05-14 07:49:42 +08:00
|
|
|
  // If the pointer is known incremented in one direction and we do not have
|
|
|
|
  // MultipleOwners, we can safely remove the retain/release pairs. Otherwise
|
|
|
|
  // the pointer needs to be known safe in both directions.
|
|
|
|
bool UnconditionallySafe = (KnownSafeTD && KnownSafeBU) ||
|
|
|
|
((KnownSafeTD || KnownSafeBU) && !MultipleOwners);
|
|
|
|
if (UnconditionallySafe) {
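    // No insertion points are needed; clearing them means the paired calls
    // are simply deleted rather than moved.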
|
2013-01-23 05:49:00 +08:00
|
|
|
RetainsToMove.ReverseInsertPts.clear();
|
|
|
|
ReleasesToMove.ReverseInsertPts.clear();
|
|
|
|
NewCount = 0;
|
|
|
|
} else {
|
|
|
|
// Determine whether the new insertion points we computed preserve the
|
|
|
|
// balance of retain and release calls through the program.
|
|
|
|
// TODO: If the fully aggressive solution isn't valid, try to find a
|
|
|
|
// less aggressive solution which is.
|
|
|
|
if (NewDelta != 0)
|
|
|
|
return false;
|
2013-05-25 04:44:05 +08:00
|
|
|
|
|
|
|
    // At this point, we are not going to remove any RR pairs, but we are still
|
|
|
|
// able to move RR pairs. If one of our pointers is afflicted with
|
|
|
|
    // CFGHazards, we cannot perform such code motion, so exit early.
|
|
|
|
const bool WillPerformCodeMotion = RetainsToMove.ReverseInsertPts.size() ||
|
|
|
|
ReleasesToMove.ReverseInsertPts.size();
|
|
|
|
if (CFGHazardAfflicted && WillPerformCodeMotion)
|
|
|
|
return false;
|
2013-01-23 05:49:00 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
// Determine whether the original call points are balanced in the retain and
|
|
|
|
// release calls through the program. If not, conservatively don't touch
|
|
|
|
// them.
|
|
|
|
// TODO: It's theoretically possible to do code motion in this case, as
|
|
|
|
// long as the existing imbalances are maintained.
|
|
|
|
if (OldDelta != 0)
|
|
|
|
return false;
|
2013-04-29 14:16:55 +08:00
|
|
|
|
2013-04-29 13:13:13 +08:00
|
|
|
#ifdef ARC_ANNOTATIONS
|
|
|
|
// Do not move calls if ARC annotations are requested.
|
2013-04-29 13:25:39 +08:00
|
|
|
if (EnableARCAnnotations)
|
|
|
|
return false;
|
2013-04-29 13:13:13 +08:00
|
|
|
#endif // ARC_ANNOTATIONS
|
2013-01-23 05:49:00 +08:00
|
|
|
|
|
|
|
Changed = true;
|
|
|
|
assert(OldCount != 0 && "Unreachable code?");
|
|
|
|
NumRRs += OldCount - NewCount;
|
|
|
|
// Set to true if we completely removed any RR pairs.
|
2013-01-23 05:53:43 +08:00
|
|
|
AnyPairsCompletelyEliminated = NewCount == 0;
|
2013-01-23 05:49:00 +08:00
|
|
|
|
|
|
|
// We can move calls!
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2013-01-14 08:35:14 +08:00
|
|
|
/// Identify pairings between the retains and releases, and delete and/or move
|
|
|
|
/// them.
|
2011-06-16 07:37:01 +08:00
|
|
|
bool
|
|
|
|
ObjCARCOpt::PerformCodePlacement(DenseMap<const BasicBlock *, BBState>
|
|
|
|
&BBStates,
|
|
|
|
MapVector<Value *, RRInfo> &Retains,
|
2011-07-23 06:29:21 +08:00
|
|
|
DenseMap<Value *, RRInfo> &Releases,
|
|
|
|
Module *M) {
|
2013-04-06 02:10:41 +08:00
|
|
|
DEBUG(dbgs() << "\n== ObjCARCOpt::PerformCodePlacement ==\n");
|
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
bool AnyPairsCompletelyEliminated = false;
|
|
|
|
RRInfo RetainsToMove;
|
|
|
|
RRInfo ReleasesToMove;
|
|
|
|
SmallVector<Instruction *, 4> NewRetains;
|
|
|
|
SmallVector<Instruction *, 4> NewReleases;
|
|
|
|
SmallVector<Instruction *, 8> DeadInsts;
|
|
|
|
|
2012-04-14 02:57:48 +08:00
|
|
|
// Visit each retain.
|
2011-06-16 07:37:01 +08:00
|
|
|
for (MapVector<Value *, RRInfo>::const_iterator I = Retains.begin(),
|
2011-09-30 06:25:23 +08:00
|
|
|
E = Retains.end(); I != E; ++I) {
|
|
|
|
Value *V = I->first;
|
2011-06-16 07:37:01 +08:00
|
|
|
if (!V) continue; // blotted
|
|
|
|
|
|
|
|
Instruction *Retain = cast<Instruction>(V);
|
2013-01-10 03:23:24 +08:00
|
|
|
|
2013-04-06 02:10:41 +08:00
|
|
|
DEBUG(dbgs() << "Visiting: " << *Retain << "\n");
|
2013-01-10 03:23:24 +08:00
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
Value *Arg = GetObjCArg(Retain);
|
|
|
|
|
2012-01-13 08:39:07 +08:00
|
|
|
// If the object being released is in static or stack storage, we know it's
|
2011-06-16 07:37:01 +08:00
|
|
|
// not being managed by ObjC reference counting, so we can delete pairs
|
|
|
|
// regardless of what possible decrements or uses lie between them.
|
2012-01-13 08:39:07 +08:00
|
|
|
bool KnownSafe = isa<Constant>(Arg) || isa<AllocaInst>(Arg);
|
2012-05-09 07:39:44 +08:00
|
|
|
|
2011-08-23 01:29:11 +08:00
|
|
|
// A constant pointer can't be pointing to an object on the heap. It may
|
|
|
|
// be reference-counted, but it won't be deleted.
|
|
|
|
if (const LoadInst *LI = dyn_cast<LoadInst>(Arg))
|
|
|
|
if (const GlobalVariable *GV =
|
|
|
|
dyn_cast<GlobalVariable>(
|
|
|
|
StripPointerCastsAndObjCCalls(LI->getPointerOperand())))
|
|
|
|
if (GV->isConstant())
|
|
|
|
KnownSafe = true;
|
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
// Connect the dots between the top-down-collected RetainsToMove and
|
|
|
|
// bottom-up-collected ReleasesToMove to form sets of related calls.
|
|
|
|
NewRetains.push_back(Retain);
|
2013-01-23 05:49:00 +08:00
|
|
|
bool PerformMoveCalls =
|
|
|
|
ConnectTDBUTraversals(BBStates, Retains, Releases, M, NewRetains,
|
|
|
|
NewReleases, DeadInsts, RetainsToMove,
|
|
|
|
ReleasesToMove, Arg, KnownSafe,
|
|
|
|
AnyPairsCompletelyEliminated);
|
2011-06-16 07:37:01 +08:00
|
|
|
|
2013-01-23 05:49:00 +08:00
|
|
|
if (PerformMoveCalls) {
|
|
|
|
// Ok, everything checks out and we're all set. Let's move/delete some
|
|
|
|
// code!
|
|
|
|
MoveCalls(Arg, RetainsToMove, ReleasesToMove,
|
|
|
|
Retains, Releases, DeadInsts, M);
|
|
|
|
}
|
2011-06-16 07:37:01 +08:00
|
|
|
|
2013-01-23 05:49:00 +08:00
|
|
|
// Clean up state for next retain.
|
2011-06-16 07:37:01 +08:00
|
|
|
NewReleases.clear();
|
|
|
|
NewRetains.clear();
|
|
|
|
RetainsToMove.clear();
|
|
|
|
ReleasesToMove.clear();
|
|
|
|
}
|
|
|
|
|
|
|
|
// Now that we're done moving everything, we can delete the newly dead
|
|
|
|
// instructions, as we no longer need them as insert points.
|
|
|
|
while (!DeadInsts.empty())
|
|
|
|
EraseInstruction(DeadInsts.pop_back_val());
|
|
|
|
|
|
|
|
return AnyPairsCompletelyEliminated;
|
|
|
|
}
|
|
|
|
|
2013-01-14 08:35:14 +08:00
|
|
|
/// Weak pointer optimizations.
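/// For example (a sketch; the exact IR will vary), a second load of the same
/// __weak variable with nothing in between that can modify it:
/// \code
///   %1 = call i8* @objc_loadWeak(i8** %p)
///   %2 = call i8* @objc_loadWeak(i8** %p)
/// \endcode
/// allows the second call to be removed and its uses replaced with %1.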
|
2011-06-16 07:37:01 +08:00
|
|
|
void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
|
2013-04-06 02:10:41 +08:00
|
|
|
DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeWeakCalls ==\n");
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
// First, do memdep-style RLE and S2L optimizations. We can't use memdep
|
|
|
|
// itself because it uses AliasAnalysis and we need to do provenance
|
|
|
|
// queries instead.
|
|
|
|
for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
|
|
|
|
Instruction *Inst = &*I++;
|
2013-01-02 00:05:48 +08:00
|
|
|
|
2013-04-06 02:10:41 +08:00
|
|
|
DEBUG(dbgs() << "Visiting: " << *Inst << "\n");
|
2013-01-02 00:05:48 +08:00
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
InstructionClass Class = GetBasicInstructionClass(Inst);
|
|
|
|
if (Class != IC_LoadWeak && Class != IC_LoadWeakRetained)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
// Delete objc_loadWeak calls with no users.
|
|
|
|
if (Class == IC_LoadWeak && Inst->use_empty()) {
|
|
|
|
Inst->eraseFromParent();
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
// TODO: For now, just look for an earlier available version of this value
|
|
|
|
// within the same block. Theoretically, we could do memdep-style non-local
|
|
|
|
// analysis too, but that would want caching. A better approach would be to
|
|
|
|
// use the technique that EarlyCSE uses.
|
2014-03-02 20:27:27 +08:00
|
|
|
inst_iterator Current = std::prev(I);
|
2011-06-16 07:37:01 +08:00
|
|
|
BasicBlock *CurrentBB = Current.getBasicBlockIterator();
|
|
|
|
for (BasicBlock::iterator B = CurrentBB->begin(),
|
|
|
|
J = Current.getInstructionIterator();
|
|
|
|
J != B; --J) {
|
2014-03-02 20:27:27 +08:00
|
|
|
Instruction *EarlierInst = &*std::prev(J);
|
2011-06-16 07:37:01 +08:00
|
|
|
InstructionClass EarlierClass = GetInstructionClass(EarlierInst);
|
|
|
|
switch (EarlierClass) {
|
|
|
|
case IC_LoadWeak:
|
|
|
|
case IC_LoadWeakRetained: {
|
|
|
|
// If this is loading from the same pointer, replace this load's value
|
|
|
|
// with that one.
|
|
|
|
CallInst *Call = cast<CallInst>(Inst);
|
|
|
|
CallInst *EarlierCall = cast<CallInst>(EarlierInst);
|
|
|
|
Value *Arg = Call->getArgOperand(0);
|
|
|
|
Value *EarlierArg = EarlierCall->getArgOperand(0);
|
|
|
|
switch (PA.getAA()->alias(Arg, EarlierArg)) {
|
|
|
|
case AliasAnalysis::MustAlias:
|
|
|
|
Changed = true;
|
|
|
|
// If the load has a builtin retain, insert a plain retain for it.
|
|
|
|
if (Class == IC_LoadWeakRetained) {
|
2013-07-06 09:39:23 +08:00
|
|
|
Constant *Decl = EP.get(ARCRuntimeEntryPoints::EPT_Retain);
|
|
|
|
CallInst *CI = CallInst::Create(Decl, EarlierCall, "", Call);
|
2011-06-16 07:37:01 +08:00
|
|
|
CI->setTailCall();
|
|
|
|
}
|
|
|
|
// Zap the fully redundant load.
|
|
|
|
Call->replaceAllUsesWith(EarlierCall);
|
|
|
|
Call->eraseFromParent();
|
|
|
|
goto clobbered;
|
|
|
|
case AliasAnalysis::MayAlias:
|
|
|
|
case AliasAnalysis::PartialAlias:
|
|
|
|
goto clobbered;
|
|
|
|
case AliasAnalysis::NoAlias:
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case IC_StoreWeak:
|
|
|
|
case IC_InitWeak: {
|
|
|
|
// If this is storing to the same pointer and has the same size etc.
|
|
|
|
// replace this load's value with the stored value.
|
|
|
|
CallInst *Call = cast<CallInst>(Inst);
|
|
|
|
CallInst *EarlierCall = cast<CallInst>(EarlierInst);
|
|
|
|
Value *Arg = Call->getArgOperand(0);
|
|
|
|
Value *EarlierArg = EarlierCall->getArgOperand(0);
|
|
|
|
switch (PA.getAA()->alias(Arg, EarlierArg)) {
|
|
|
|
case AliasAnalysis::MustAlias:
|
|
|
|
Changed = true;
|
|
|
|
// If the load has a builtin retain, insert a plain retain for it.
|
|
|
|
if (Class == IC_LoadWeakRetained) {
|
2013-07-06 09:39:23 +08:00
|
|
|
Constant *Decl = EP.get(ARCRuntimeEntryPoints::EPT_Retain);
|
|
|
|
CallInst *CI = CallInst::Create(Decl, EarlierCall, "", Call);
|
2011-06-16 07:37:01 +08:00
|
|
|
CI->setTailCall();
|
|
|
|
}
|
|
|
|
// Zap the fully redundant load.
|
|
|
|
Call->replaceAllUsesWith(EarlierCall->getArgOperand(1));
|
|
|
|
Call->eraseFromParent();
|
|
|
|
goto clobbered;
|
|
|
|
case AliasAnalysis::MayAlias:
|
|
|
|
case AliasAnalysis::PartialAlias:
|
|
|
|
goto clobbered;
|
|
|
|
case AliasAnalysis::NoAlias:
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case IC_MoveWeak:
|
|
|
|
case IC_CopyWeak:
|
|
|
|
        // TODO: Grab the copied value.
|
|
|
|
goto clobbered;
|
|
|
|
case IC_AutoreleasepoolPush:
|
|
|
|
case IC_None:
|
2013-03-23 05:38:36 +08:00
|
|
|
case IC_IntrinsicUser:
|
2011-06-16 07:37:01 +08:00
|
|
|
case IC_User:
|
|
|
|
// Weak pointers are only modified through the weak entry points
|
|
|
|
// (and arbitrary calls, which could call the weak entry points).
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
// Anything else could modify the weak pointer.
|
|
|
|
goto clobbered;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
clobbered:;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Then, for each destroyWeak with an alloca operand, check to see if
|
|
|
|
// the alloca and all its users can be zapped.
|
|
|
|
for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
|
|
|
|
Instruction *Inst = &*I++;
|
|
|
|
InstructionClass Class = GetBasicInstructionClass(Inst);
|
|
|
|
if (Class != IC_DestroyWeak)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
CallInst *Call = cast<CallInst>(Inst);
|
|
|
|
Value *Arg = Call->getArgOperand(0);
|
|
|
|
if (AllocaInst *Alloca = dyn_cast<AllocaInst>(Arg)) {
|
2014-03-09 11:16:01 +08:00
|
|
|
for (User *U : Alloca->users()) {
|
|
|
|
const Instruction *UserInst = cast<Instruction>(U);
|
2011-06-16 07:37:01 +08:00
|
|
|
switch (GetBasicInstructionClass(UserInst)) {
|
|
|
|
case IC_InitWeak:
|
|
|
|
case IC_StoreWeak:
|
|
|
|
case IC_DestroyWeak:
|
|
|
|
continue;
|
|
|
|
default:
|
|
|
|
goto done;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Changed = true;
|
2014-03-09 11:16:01 +08:00
|
|
|
for (auto UI = Alloca->user_begin(), UE = Alloca->user_end(); UI != UE;) {
|
2011-06-16 07:37:01 +08:00
|
|
|
CallInst *UserInst = cast<CallInst>(*UI++);
|
2012-05-19 06:17:29 +08:00
|
|
|
switch (GetBasicInstructionClass(UserInst)) {
|
|
|
|
case IC_InitWeak:
|
|
|
|
case IC_StoreWeak:
|
|
|
|
// These functions return their second argument.
|
|
|
|
UserInst->replaceAllUsesWith(UserInst->getArgOperand(1));
|
|
|
|
break;
|
|
|
|
case IC_DestroyWeak:
|
|
|
|
// No return value.
|
|
|
|
break;
|
|
|
|
default:
|
2012-05-22 01:41:28 +08:00
|
|
|
llvm_unreachable("alloca really is used!");
|
2012-05-19 06:17:29 +08:00
|
|
|
}
|
2011-06-16 07:37:01 +08:00
|
|
|
UserInst->eraseFromParent();
|
|
|
|
}
|
|
|
|
Alloca->eraseFromParent();
|
|
|
|
done:;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-01-14 08:35:14 +08:00
|
|
|
/// Identify program paths which execute sequences of retains and releases that
|
|
|
|
/// can be eliminated.
|
2011-06-16 07:37:01 +08:00
|
|
|
bool ObjCARCOpt::OptimizeSequences(Function &F) {
|
2013-05-23 10:35:21 +08:00
|
|
|
// Releases, Retains - These are used to store the results of the main flow
|
|
|
|
// analysis. These use Value* as the key instead of Instruction* so that the
|
|
|
|
// map stays valid when we get around to rewriting code and calls get
|
|
|
|
// replaced by arguments.
|
2011-06-16 07:37:01 +08:00
|
|
|
DenseMap<Value *, RRInfo> Releases;
|
|
|
|
MapVector<Value *, RRInfo> Retains;
|
|
|
|
|
2013-05-23 10:35:21 +08:00
|
|
|
// This is used during the traversal of the function to track the
|
|
|
|
// states for each identified object at each block.
|
2011-06-16 07:37:01 +08:00
|
|
|
DenseMap<const BasicBlock *, BBState> BBStates;
|
|
|
|
|
|
|
|
// Analyze the CFG of the function, and all instructions.
|
|
|
|
bool NestingDetected = Visit(F, BBStates, Retains, Releases);
|
|
|
|
|
|
|
|
// Transform.
|
2013-05-25 04:44:02 +08:00
|
|
|
bool AnyPairsCompletelyEliminated = PerformCodePlacement(BBStates, Retains,
|
|
|
|
Releases,
|
|
|
|
F.getParent());
|
|
|
|
|
|
|
|
// Cleanup.
|
|
|
|
MultiOwnersSet.clear();
|
|
|
|
|
|
|
|
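  // Ask the caller to iterate only if pairs were actually eliminated and
  // nesting was detected; otherwise another pass is unlikely to help.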
return AnyPairsCompletelyEliminated && NestingDetected;
|
2011-06-16 07:37:01 +08:00
|
|
|
}
|
|
|
|
|
2013-04-04 07:07:45 +08:00
|
|
|
/// Check if there is an earlier call on which Retain depends, with nothing in
|
|
|
|
/// between that call and the Retain that can affect the reference count of
|
|
|
|
/// their shared pointer argument. Note that Retain need not be in BB.
|
2013-04-04 07:04:28 +08:00
|
|
|
static bool
|
|
|
|
HasSafePathToPredecessorCall(const Value *Arg, Instruction *Retain,
|
|
|
|
SmallPtrSet<Instruction *, 4> &DepInsts,
|
|
|
|
SmallPtrSet<const BasicBlock *, 4> &Visited,
|
|
|
|
ProvenanceAnalysis &PA) {
|
|
|
|
FindDependencies(CanChangeRetainCount, Arg, Retain->getParent(), Retain,
|
|
|
|
DepInsts, Visited, PA);
|
|
|
|
if (DepInsts.size() != 1)
|
|
|
|
return false;
|
2013-04-04 07:07:45 +08:00
|
|
|
|
2013-04-04 07:04:28 +08:00
|
|
|
CallInst *Call =
|
|
|
|
dyn_cast_or_null<CallInst>(*DepInsts.begin());
|
2013-04-04 07:07:45 +08:00
|
|
|
|
2013-04-04 07:04:28 +08:00
|
|
|
// Check that the pointer is the return value of the call.
|
|
|
|
if (!Call || Arg != Call)
|
|
|
|
return false;
|
2013-04-04 07:07:45 +08:00
|
|
|
|
2013-04-04 07:04:28 +08:00
|
|
|
// Check that the call is a regular call.
|
|
|
|
InstructionClass Class = GetBasicInstructionClass(Call);
|
|
|
|
if (Class != IC_CallOrUser && Class != IC_Call)
|
|
|
|
return false;
|
2013-04-04 07:07:45 +08:00
|
|
|
|
2013-04-04 07:04:28 +08:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2013-04-04 07:16:05 +08:00
|
|
|
/// Find a dependent retain that precedes the given autorelease for which there
|
|
|
|
/// is nothing in between the two instructions that can affect the ref count of
|
|
|
|
/// Arg.
|
|
|
|
static CallInst *
|
|
|
|
FindPredecessorRetainWithSafePath(const Value *Arg, BasicBlock *BB,
|
|
|
|
Instruction *Autorelease,
|
|
|
|
SmallPtrSet<Instruction *, 4> &DepInsts,
|
|
|
|
SmallPtrSet<const BasicBlock *, 4> &Visited,
|
|
|
|
ProvenanceAnalysis &PA) {
|
|
|
|
FindDependencies(CanChangeRetainCount, Arg,
|
|
|
|
BB, Autorelease, DepInsts, Visited, PA);
|
|
|
|
if (DepInsts.size() != 1)
|
|
|
|
return 0;
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2013-04-04 07:16:05 +08:00
|
|
|
CallInst *Retain =
|
|
|
|
dyn_cast_or_null<CallInst>(*DepInsts.begin());
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2013-04-04 07:16:05 +08:00
|
|
|
// Check that we found a retain with the same argument.
|
|
|
|
if (!Retain ||
|
|
|
|
!IsRetain(GetBasicInstructionClass(Retain)) ||
|
|
|
|
GetObjCArg(Retain) != Arg) {
|
|
|
|
return 0;
|
|
|
|
}
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2013-04-04 07:16:05 +08:00
|
|
|
return Retain;
|
|
|
|
}
|
|
|
|
|
2013-04-04 07:39:14 +08:00
|
|
|
/// Look for an ``autorelease'' instruction dependent on Arg such that there are
|
|
|
|
/// no instructions dependent on Arg that need a positive ref count in between
|
|
|
|
/// the autorelease and the ret.
|
|
|
|
static CallInst *
|
|
|
|
FindPredecessorAutoreleaseWithSafePath(const Value *Arg, BasicBlock *BB,
|
|
|
|
ReturnInst *Ret,
|
|
|
|
SmallPtrSet<Instruction *, 4> &DepInsts,
|
|
|
|
SmallPtrSet<const BasicBlock *, 4> &V,
|
|
|
|
ProvenanceAnalysis &PA) {
|
|
|
|
FindDependencies(NeedsPositiveRetainCount, Arg,
|
|
|
|
BB, Ret, DepInsts, V, PA);
|
|
|
|
if (DepInsts.size() != 1)
|
|
|
|
return 0;
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2013-04-04 07:39:14 +08:00
|
|
|
CallInst *Autorelease =
|
|
|
|
dyn_cast_or_null<CallInst>(*DepInsts.begin());
|
|
|
|
if (!Autorelease)
|
|
|
|
return 0;
|
|
|
|
InstructionClass AutoreleaseClass = GetBasicInstructionClass(Autorelease);
|
|
|
|
if (!IsAutorelease(AutoreleaseClass))
|
|
|
|
return 0;
|
|
|
|
if (GetObjCArg(Autorelease) != Arg)
|
|
|
|
return 0;
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2013-04-04 07:39:14 +08:00
|
|
|
return Autorelease;
|
|
|
|
}
|
|
|
|
|
2013-01-14 08:35:14 +08:00
|
|
|
/// Look for this pattern:
|
2012-09-14 22:57:36 +08:00
|
|
|
/// \code
|
2011-06-16 07:37:01 +08:00
|
|
|
/// %call = call i8* @something(...)
|
|
|
|
/// %2 = call i8* @objc_retain(i8* %call)
|
|
|
|
/// %3 = call i8* @objc_autorelease(i8* %2)
|
|
|
|
/// ret i8* %3
|
2012-09-14 22:57:36 +08:00
|
|
|
/// \endcode
|
2011-06-16 07:37:01 +08:00
|
|
|
/// And delete the retain and autorelease.
|
|
|
|
void ObjCARCOpt::OptimizeReturns(Function &F) {
|
|
|
|
if (!F.getReturnType()->isPointerTy())
|
|
|
|
return;
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2013-04-06 02:10:41 +08:00
|
|
|
DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeReturns ==\n");
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
SmallPtrSet<Instruction *, 4> DependingInstructions;
|
|
|
|
SmallPtrSet<const BasicBlock *, 4> Visited;
|
|
|
|
for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
|
|
|
|
BasicBlock *BB = FI;
|
|
|
|
ReturnInst *Ret = dyn_cast<ReturnInst>(&BB->back());
|
2013-01-02 00:05:48 +08:00
|
|
|
|
2013-04-06 02:10:41 +08:00
|
|
|
DEBUG(dbgs() << "Visiting: " << *Ret << "\n");
|
2013-01-02 00:05:48 +08:00
|
|
|
|
2013-04-04 07:39:14 +08:00
|
|
|
if (!Ret)
|
|
|
|
continue;
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
const Value *Arg = StripPointerCastsAndObjCCalls(Ret->getOperand(0));
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2013-04-21 08:25:04 +08:00
|
|
|
// Look for an ``autorelease'' instruction that is a predecessor of Ret and
|
2013-04-04 07:39:14 +08:00
|
|
|
// dependent on Arg such that there are no instructions dependent on Arg
|
|
|
|
// that need a positive ref count in between the autorelease and Ret.
|
|
|
|
CallInst *Autorelease =
|
|
|
|
FindPredecessorAutoreleaseWithSafePath(Arg, BB, Ret,
|
|
|
|
DependingInstructions, Visited,
|
|
|
|
PA);
|
2013-04-21 08:25:01 +08:00
|
|
|
DependingInstructions.clear();
|
|
|
|
Visited.clear();
|
|
|
|
|
|
|
|
if (!Autorelease)
|
|
|
|
continue;
|
2013-04-06 07:46:45 +08:00
|
|
|
|
2013-04-21 08:25:01 +08:00
|
|
|
CallInst *Retain =
|
|
|
|
FindPredecessorRetainWithSafePath(Arg, BB, Autorelease,
|
|
|
|
DependingInstructions, Visited, PA);
|
2011-06-16 07:37:01 +08:00
|
|
|
DependingInstructions.clear();
|
|
|
|
Visited.clear();
|
2013-04-21 08:25:01 +08:00
|
|
|
|
|
|
|
if (!Retain)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
// Check that there is nothing that can affect the reference count
|
|
|
|
// between the retain and the call. Note that Retain need not be in BB.
|
|
|
|
bool HasSafePathToCall = HasSafePathToPredecessorCall(Arg, Retain,
|
|
|
|
DependingInstructions,
|
|
|
|
Visited, PA);
|
|
|
|
DependingInstructions.clear();
|
|
|
|
Visited.clear();
|
|
|
|
|
|
|
|
if (!HasSafePathToCall)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
// If so, we can zap the retain and autorelease.
|
|
|
|
Changed = true;
|
|
|
|
++NumRets;
|
|
|
|
DEBUG(dbgs() << "Erasing: " << *Retain << "\nErasing: "
|
|
|
|
<< *Autorelease << "\n");
|
|
|
|
EraseInstruction(Retain);
|
|
|
|
EraseInstruction(Autorelease);
|
2011-06-16 07:37:01 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-04-29 14:16:57 +08:00
|
|
|
#ifndef NDEBUG
|
|
|
|
void
|
|
|
|
ObjCARCOpt::GatherStatistics(Function &F, bool AfterOptimization) {
|
|
|
|
llvm::Statistic &NumRetains =
|
|
|
|
AfterOptimization? NumRetainsAfterOpt : NumRetainsBeforeOpt;
|
|
|
|
llvm::Statistic &NumReleases =
|
|
|
|
AfterOptimization? NumReleasesAfterOpt : NumReleasesBeforeOpt;
|
|
|
|
|
|
|
|
for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
|
|
|
|
Instruction *Inst = &*I++;
|
|
|
|
switch (GetBasicInstructionClass(Inst)) {
|
|
|
|
default:
|
|
|
|
break;
|
|
|
|
case IC_Retain:
|
|
|
|
++NumRetains;
|
|
|
|
break;
|
|
|
|
case IC_Release:
|
|
|
|
++NumReleases;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
bool ObjCARCOpt::doInitialization(Module &M) {
|
|
|
|
if (!EnableARCOpts)
|
|
|
|
return false;
|
|
|
|
|
2012-04-14 02:57:48 +08:00
|
|
|
// If nothing in the Module uses ARC, don't do anything.
|
2011-06-21 07:20:43 +08:00
|
|
|
Run = ModuleHasARC(M);
|
|
|
|
if (!Run)
|
|
|
|
return false;
|
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
// Identify the imprecise release metadata kind.
|
|
|
|
ImpreciseReleaseMDKind =
|
|
|
|
M.getContext().getMDKindID("clang.imprecise_release");
|
2011-10-18 06:53:25 +08:00
|
|
|
CopyOnEscapeMDKind =
|
|
|
|
M.getContext().getMDKindID("clang.arc.copy_on_escape");
|
2012-02-18 02:59:53 +08:00
|
|
|
NoObjCARCExceptionsMDKind =
|
|
|
|
M.getContext().getMDKindID("clang.arc.no_objc_arc_exceptions");
|
2013-03-26 08:42:04 +08:00
|
|
|
#ifdef ARC_ANNOTATIONS
|
|
|
|
ARCAnnotationBottomUpMDKind =
|
|
|
|
M.getContext().getMDKindID("llvm.arc.annotation.bottomup");
|
|
|
|
ARCAnnotationTopDownMDKind =
|
|
|
|
M.getContext().getMDKindID("llvm.arc.annotation.topdown");
|
|
|
|
ARCAnnotationProvenanceSourceMDKind =
|
|
|
|
M.getContext().getMDKindID("llvm.arc.annotation.provenancesource");
|
|
|
|
#endif // ARC_ANNOTATIONS
|
2011-06-16 07:37:01 +08:00
|
|
|
|
|
|
|
  // Intuitively, objc_retain and others are nocapture; however, in practice
|
|
|
|
// they are not, because they return their argument value. And objc_release
|
2012-04-28 02:56:31 +08:00
|
|
|
// calls finalizers which can have arbitrary side effects.
|
2011-06-16 07:37:01 +08:00
|
|
|
|
2013-07-06 09:39:23 +08:00
|
|
|
// Initialize our runtime entry point cache.
|
|
|
|
EP.Initialize(&M);
|
2011-06-16 07:37:01 +08:00
|
|
|
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool ObjCARCOpt::runOnFunction(Function &F) {
|
|
|
|
if (!EnableARCOpts)
|
|
|
|
return false;
|
|
|
|
|
2011-06-21 07:20:43 +08:00
|
|
|
// If nothing in the Module uses ARC, don't do anything.
|
|
|
|
if (!Run)
|
|
|
|
return false;
|
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
Changed = false;
|
|
|
|
|
2013-04-06 02:10:41 +08:00
|
|
|
DEBUG(dbgs() << "<<< ObjCARCOpt: Visiting Function: " << F.getName() << " >>>"
|
|
|
|
"\n");
|
2013-01-12 10:57:16 +08:00
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
PA.setAA(&getAnalysis<AliasAnalysis>());
|
|
|
|
|
2013-05-14 02:29:07 +08:00
|
|
|
#ifndef NDEBUG
|
|
|
|
if (AreStatisticsEnabled()) {
|
|
|
|
GatherStatistics(F, false);
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
// This pass performs several distinct transformations. As a compile-time aid
|
|
|
|
// when compiling code that isn't ObjC, skip these if the relevant ObjC
|
|
|
|
// library functions aren't declared.
|
|
|
|
|
2013-04-25 06:18:15 +08:00
|
|
|
// Preliminary optimizations. This also computes UsedInThisFunction.
|
2011-06-16 07:37:01 +08:00
|
|
|
OptimizeIndividualCalls(F);
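  // UsedInThisFunction is a bit mask over InstructionClass values; the checks
  // below use it to skip whole phases when the relevant calls never appear.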
|
|
|
|
|
|
|
|
// Optimizations for weak pointers.
|
|
|
|
if (UsedInThisFunction & ((1 << IC_LoadWeak) |
|
|
|
|
(1 << IC_LoadWeakRetained) |
|
|
|
|
(1 << IC_StoreWeak) |
|
|
|
|
(1 << IC_InitWeak) |
|
|
|
|
(1 << IC_CopyWeak) |
|
|
|
|
(1 << IC_MoveWeak) |
|
|
|
|
(1 << IC_DestroyWeak)))
|
|
|
|
OptimizeWeakCalls(F);
|
|
|
|
|
|
|
|
// Optimizations for retain+release pairs.
|
|
|
|
if (UsedInThisFunction & ((1 << IC_Retain) |
|
|
|
|
(1 << IC_RetainRV) |
|
|
|
|
(1 << IC_RetainBlock)))
|
|
|
|
if (UsedInThisFunction & (1 << IC_Release))
|
|
|
|
// Run OptimizeSequences until it either stops making changes or
|
|
|
|
// no retain+release pair nesting is detected.
|
|
|
|
while (OptimizeSequences(F)) {}
|
|
|
|
|
|
|
|
// Optimizations if objc_autorelease is used.
|
2012-05-09 07:39:44 +08:00
|
|
|
if (UsedInThisFunction & ((1 << IC_Autorelease) |
|
|
|
|
(1 << IC_AutoreleaseRV)))
|
2011-06-16 07:37:01 +08:00
|
|
|
OptimizeReturns(F);
|
|
|
|
|
2013-04-29 14:16:57 +08:00
|
|
|
// Gather statistics after optimization.
|
|
|
|
#ifndef NDEBUG
|
|
|
|
if (AreStatisticsEnabled()) {
|
|
|
|
GatherStatistics(F, true);
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2013-01-12 10:57:16 +08:00
|
|
|
DEBUG(dbgs() << "\n");
|
|
|
|
|
2011-06-16 07:37:01 +08:00
|
|
|
return Changed;
|
|
|
|
}
|
|
|
|
|
|
|
|
void ObjCARCOpt::releaseMemory() {
|
|
|
|
PA.clear();
|
|
|
|
}
|
|
|
|
|
2013-01-14 08:35:14 +08:00
|
|
|
/// @}
|
|
|
|
///
|