Replace std::copy with a back inserter with vector append where feasible
All of the cases were just appending from random-access iterators to a vector. Using insert/append can grow the vector to the final size directly and moves the growing out of the loop. No intended functionality change.

llvm-svn: 230845
This commit is contained in:
parent b759340fc8
commit 4f6ac16292
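To illustrate the idiom this patch applies, here is a minimal standalone sketch (made-up data and names, not code from the patch): std::copy through a back_inserter push_backs one element at a time, so the destination may reallocate repeatedly inside the copy loop, while a range insert can compute the distance between two random-access iterators up front and grow the buffer once.

#include <algorithm>
#include <iterator>
#include <vector>

int main() {
  std::vector<int> Src = {1, 2, 3, 4};
  std::vector<int> Dst = {0};

  // Before: each element is push_back'd individually, so the vector may
  // grow (reallocate) several times while std::copy loops.
  std::copy(Src.begin(), Src.end(), std::back_inserter(Dst));

  // After: the range insert sees the iterator distance, reserves the exact
  // final capacity once, and then copies the elements.
  Dst.insert(Dst.end(), Src.begin(), Src.end());
  return 0;
}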
@@ -78,7 +78,7 @@ inline void addNodeToInterval(Interval *Int, BasicBlock *BB) {
 //
 inline void addNodeToInterval(Interval *Int, Interval *I) {
   // Add all of the nodes in I as new nodes in Int.
-  copy(I->Nodes.begin(), I->Nodes.end(), back_inserter(Int->Nodes));
+  Int->Nodes.insert(Int->Nodes.end(), I->Nodes.begin(), I->Nodes.end());
 }
 
 
@@ -312,8 +312,7 @@ static const Value *getNoopInput(const Value *V,
       // previous aggregate. Combine the two paths to obtain the true address of
       // our element.
       ArrayRef<unsigned> ExtractLoc = EVI->getIndices();
-      std::copy(ExtractLoc.rbegin(), ExtractLoc.rend(),
-                std::back_inserter(ValLoc));
+      ValLoc.append(ExtractLoc.rbegin(), ExtractLoc.rend());
       NoopInput = Op;
     }
     // Terminate if we couldn't find anything to look through.
@@ -601,10 +600,8 @@ bool llvm::returnTypeIsEligibleForTailCall(const Function *F,
   // The manipulations performed when we're looking through an insertvalue or
   // an extractvalue would happen at the front of the RetPath list, so since
   // we have to copy it anyway it's more efficient to create a reversed copy.
-  using std::copy;
-  SmallVector<unsigned, 4> TmpRetPath, TmpCallPath;
-  copy(RetPath.rbegin(), RetPath.rend(), std::back_inserter(TmpRetPath));
-  copy(CallPath.rbegin(), CallPath.rend(), std::back_inserter(TmpCallPath));
+  SmallVector<unsigned, 4> TmpRetPath(RetPath.rbegin(), RetPath.rend());
+  SmallVector<unsigned, 4> TmpCallPath(CallPath.rbegin(), CallPath.rend());
 
   // Finally, we can check whether the value produced by the tail call at this
   // index is compatible with the value we return.
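This hunk goes one step further than append: since both temporaries start empty, the reversed copy can happen in the constructor itself. A sketch of the two shapes, using std::vector as a stand-in for llvm::SmallVector (both provide an iterator-range constructor):

#include <algorithm>
#include <iterator>
#include <vector>

std::vector<unsigned> reversedCopyOld(const std::vector<unsigned> &Path) {
  std::vector<unsigned> Tmp;          // starts empty
  std::copy(Path.rbegin(), Path.rend(),
            std::back_inserter(Tmp)); // grows element by element
  return Tmp;
}

std::vector<unsigned> reversedCopyNew(const std::vector<unsigned> &Path) {
  // The range constructor allocates the right size once, then copies.
  return std::vector<unsigned>(Path.rbegin(), Path.rend());
}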
@@ -1555,7 +1555,7 @@ void IfConverter::PredicateBlock(BBInfo &BBI,
     UpdatePredRedefs(I, Redefs);
   }
 
-  std::copy(Cond.begin(), Cond.end(), std::back_inserter(BBI.Predicate));
+  BBI.Predicate.append(Cond.begin(), Cond.end());
 
   BBI.IsAnalyzed = false;
   BBI.NonPredSize = 0;
@@ -1620,9 +1620,8 @@ void IfConverter::CopyAndPredicateBlock(BBInfo &ToBBI, BBInfo &FromBBI,
     }
   }
 
-  std::copy(FromBBI.Predicate.begin(), FromBBI.Predicate.end(),
-            std::back_inserter(ToBBI.Predicate));
-  std::copy(Cond.begin(), Cond.end(), std::back_inserter(ToBBI.Predicate));
+  ToBBI.Predicate.append(FromBBI.Predicate.begin(), FromBBI.Predicate.end());
+  ToBBI.Predicate.append(Cond.begin(), Cond.end());
 
   ToBBI.ClobbersPred |= FromBBI.ClobbersPred;
   ToBBI.IsAnalyzed = false;
@@ -1661,8 +1660,7 @@ void IfConverter::MergeBlocks(BBInfo &ToBBI, BBInfo &FromBBI, bool AddEdges) {
   if (NBB && !FromBBI.BB->isSuccessor(NBB))
     FromBBI.BB->addSuccessor(NBB);
 
-  std::copy(FromBBI.Predicate.begin(), FromBBI.Predicate.end(),
-            std::back_inserter(ToBBI.Predicate));
+  ToBBI.Predicate.append(FromBBI.Predicate.begin(), FromBBI.Predicate.end());
   FromBBI.Predicate.clear();
 
   ToBBI.NonPredSize += FromBBI.NonPredSize;
@@ -411,8 +411,7 @@ optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
 
   if (ExtendLife && !ExtendedUses.empty())
     // Extend the liveness of the extension result.
-    std::copy(ExtendedUses.begin(), ExtendedUses.end(),
-              std::back_inserter(Uses));
+    Uses.append(ExtendedUses.begin(), ExtendedUses.end());
 
   // Now replace all uses.
   bool Changed = false;
@@ -67,8 +67,7 @@ void DWARFDebugLoc::parse(DataExtractor data, unsigned AddressSize) {
       // A single location description describing the location of the object...
       StringRef str = data.getData().substr(Offset, Bytes);
       Offset += Bytes;
-      E.Loc.reserve(str.size());
-      std::copy(str.begin(), str.end(), std::back_inserter(E.Loc));
+      E.Loc.append(str.begin(), str.end());
       Loc.Entries.push_back(std::move(E));
     }
   }
@@ -5559,7 +5559,7 @@ X86InstrInfo::unfoldMemoryOperand(SelectionDAG &DAG, SDNode *N,
   }
   if (Load)
     BeforeOps.push_back(SDValue(Load, 0));
-  std::copy(AfterOps.begin(), AfterOps.end(), std::back_inserter(BeforeOps));
+  BeforeOps.insert(BeforeOps.end(), AfterOps.begin(), AfterOps.end());
   SDNode *NewNode= DAG.getMachineNode(Opc, dl, VTs, BeforeOps);
   NewNodes.push_back(NewNode);
 
@@ -247,7 +247,7 @@ public:
   /// hold.
   void insert(ArrayRef<Slice> NewSlices) {
     int OldSize = Slices.size();
-    std::move(NewSlices.begin(), NewSlices.end(), std::back_inserter(Slices));
+    Slices.append(NewSlices.begin(), NewSlices.end());
     auto SliceI = Slices.begin() + OldSize;
     std::sort(SliceI, Slices.end());
     std::inplace_merge(Slices.begin(), SliceI, Slices.end());
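For context on why this hunk only touches the append line: the surrounding code keeps the vector sorted by appending the new elements, sorting just the appended tail, and then merging the two sorted runs in place. A small standalone sketch of that pattern (illustrative names, not SROA's real types):

#include <algorithm>
#include <vector>

void insertSorted(std::vector<int> &Sorted, const std::vector<int> &New) {
  auto OldSize = Sorted.size();
  // One growth step instead of element-by-element back insertion.
  Sorted.insert(Sorted.end(), New.begin(), New.end());
  auto Mid = Sorted.begin() + OldSize;
  std::sort(Mid, Sorted.end());                           // sort only the new tail
  std::inplace_merge(Sorted.begin(), Mid, Sorted.end());  // merge the two sorted runs
}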
@@ -1447,8 +1447,9 @@ void AsmMatcherInfo::buildInfo() {
       II->buildAliasResultOperands();
     }
     if (!NewMatchables.empty())
-      std::move(NewMatchables.begin(), NewMatchables.end(),
-                std::back_inserter(Matchables));
+      Matchables.insert(Matchables.end(),
+                        std::make_move_iterator(NewMatchables.begin()),
+                        std::make_move_iterator(NewMatchables.end()));
 
     // Process token alias definitions and set up the associated superclass
     // information.
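This last hunk differs from the others: the old code used std::move, so the elements must be moved rather than copied, and a plain range insert would copy. Wrapping the source iterators in std::make_move_iterator keeps the single-growth-step benefit while still moving each element. A sketch using std::unique_ptr as a stand-in move-only element type (that the matchables are move-only is an assumption read off the diff, not stated in the patch):

#include <iterator>
#include <memory>
#include <vector>

int main() {
  std::vector<std::unique_ptr<int>> New, All;
  New.push_back(std::make_unique<int>(42));

  // Range insert with move iterators: the vector grows once, and each
  // unique_ptr is moved out of New rather than copied.
  All.insert(All.end(),
             std::make_move_iterator(New.begin()),
             std::make_move_iterator(New.end()));
  return 0;
}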