forked from OSchip/llvm-project
Revert "Use llvm::lower_bound. NFC"
This reverts commit rL358161. This patch has broken the test: llvm/test/tools/llvm-exegesis/X86/uops-CMOV16rm-noreg.s llvm-svn: 358199
This commit is contained in:
parent
528b01e998
commit
7822b46188
|
@ -462,8 +462,9 @@ public:
|
|||
DWARFDie getDIEForOffset(uint32_t Offset) {
|
||||
extractDIEsIfNeeded(false);
|
||||
assert(!DieArray.empty());
|
||||
auto it = llvm::lower_bound(
|
||||
DieArray, Offset, [](const DWARFDebugInfoEntry &LHS, uint32_t Offset) {
|
||||
auto it = std::lower_bound(
|
||||
DieArray.begin(), DieArray.end(), Offset,
|
||||
[](const DWARFDebugInfoEntry &LHS, uint32_t Offset) {
|
||||
return LHS.getOffset() < Offset;
|
||||
});
|
||||
if (it != DieArray.end() && it->getOffset() == Offset)
|
||||
|
|
|
@ -1497,8 +1497,9 @@ bool TargetLibraryInfoImpl::isFunctionVectorizable(StringRef funcName) const {
|
|||
if (funcName.empty())
|
||||
return false;
|
||||
|
||||
std::vector<VecDesc>::const_iterator I =
|
||||
llvm::lower_bound(VectorDescs, funcName, compareWithScalarFnName);
|
||||
std::vector<VecDesc>::const_iterator I = std::lower_bound(
|
||||
VectorDescs.begin(), VectorDescs.end(), funcName,
|
||||
compareWithScalarFnName);
|
||||
return I != VectorDescs.end() && StringRef(I->ScalarFnName) == funcName;
|
||||
}
|
||||
|
||||
|
@ -1507,8 +1508,8 @@ StringRef TargetLibraryInfoImpl::getVectorizedFunction(StringRef F,
|
|||
F = sanitizeFunctionName(F);
|
||||
if (F.empty())
|
||||
return F;
|
||||
std::vector<VecDesc>::const_iterator I =
|
||||
llvm::lower_bound(VectorDescs, F, compareWithScalarFnName);
|
||||
std::vector<VecDesc>::const_iterator I = std::lower_bound(
|
||||
VectorDescs.begin(), VectorDescs.end(), F, compareWithScalarFnName);
|
||||
while (I != VectorDescs.end() && StringRef(I->ScalarFnName) == F) {
|
||||
if (I->VectorizationFactor == VF)
|
||||
return I->VectorFnName;
|
||||
|
@ -1523,8 +1524,8 @@ StringRef TargetLibraryInfoImpl::getScalarizedFunction(StringRef F,
|
|||
if (F.empty())
|
||||
return F;
|
||||
|
||||
std::vector<VecDesc>::const_iterator I =
|
||||
llvm::lower_bound(ScalarDescs, F, compareWithVectorFnName);
|
||||
std::vector<VecDesc>::const_iterator I = std::lower_bound(
|
||||
ScalarDescs.begin(), ScalarDescs.end(), F, compareWithVectorFnName);
|
||||
if (I == VectorDescs.end() || StringRef(I->VectorFnName) != F)
|
||||
return StringRef();
|
||||
VF = I->VectorizationFactor;
|
||||
|
|
|
@ -180,8 +180,8 @@ void BitcodeReaderValueList::resolveConstantForwardRefs() {
|
|||
NewOp = RealVal;
|
||||
} else {
|
||||
// Otherwise, look up the placeholder in ResolveConstants.
|
||||
ResolveConstantsTy::iterator It = llvm::lower_bound(
|
||||
ResolveConstants,
|
||||
ResolveConstantsTy::iterator It = std::lower_bound(
|
||||
ResolveConstants.begin(), ResolveConstants.end(),
|
||||
std::pair<Constant *, unsigned>(cast<Constant>(*I), 0));
|
||||
assert(It != ResolveConstants.end() && It->first == *I);
|
||||
NewOp = operator[](It->second);
|
||||
|
|
|
@ -535,7 +535,8 @@ bool IRTranslator::translateExtractValue(const User &U,
|
|||
uint64_t Offset = getOffsetFromIndices(U, *DL);
|
||||
ArrayRef<unsigned> SrcRegs = getOrCreateVRegs(*Src);
|
||||
ArrayRef<uint64_t> Offsets = *VMap.getOffsets(*Src);
|
||||
unsigned Idx = llvm::lower_bound(Offsets, Offset) - Offsets.begin();
|
||||
unsigned Idx = std::lower_bound(Offsets.begin(), Offsets.end(), Offset) -
|
||||
Offsets.begin();
|
||||
auto &DstRegs = allocateVRegs(U);
|
||||
|
||||
for (unsigned i = 0; i < DstRegs.size(); ++i)
|
||||
|
|
|
@ -2262,8 +2262,8 @@ unsigned RAGreedy::tryLocalSplit(LiveInterval &VirtReg, AllocationOrder &Order,
|
|||
ArrayRef<SlotIndex> RMS = LIS->getRegMaskSlotsInBlock(BI.MBB->getNumber());
|
||||
LLVM_DEBUG(dbgs() << RMS.size() << " regmasks in block:");
|
||||
// Constrain to VirtReg's live range.
|
||||
unsigned ri =
|
||||
llvm::lower_bound(RMS, Uses.front().getRegSlot()) - RMS.begin();
|
||||
unsigned ri = std::lower_bound(RMS.begin(), RMS.end(),
|
||||
Uses.front().getRegSlot()) - RMS.begin();
|
||||
unsigned re = RMS.size();
|
||||
for (unsigned i = 0; i != NumGaps && ri != re; ++i) {
|
||||
// Look for Uses[i] <= RMS <= Uses[i+1].
|
||||
|
|
|
@ -57,8 +57,8 @@ void DWARFDebugLoc::LocationList::dump(raw_ostream &OS, bool IsLittleEndian,
|
|||
|
||||
DWARFDebugLoc::LocationList const *
|
||||
DWARFDebugLoc::getLocationListAtOffset(uint64_t Offset) const {
|
||||
auto It = llvm::lower_bound(
|
||||
Locations, Offset,
|
||||
auto It = std::lower_bound(
|
||||
Locations.begin(), Locations.end(), Offset,
|
||||
[](const LocationList &L, uint64_t Offset) { return L.Offset < Offset; });
|
||||
if (It != Locations.end() && It->Offset == Offset)
|
||||
return &(*It);
|
||||
|
@ -213,8 +213,8 @@ void DWARFDebugLoclists::parse(DataExtractor data, unsigned Version) {
|
|||
|
||||
DWARFDebugLoclists::LocationList const *
|
||||
DWARFDebugLoclists::getLocationListAtOffset(uint64_t Offset) const {
|
||||
auto It = llvm::lower_bound(
|
||||
Locations, Offset,
|
||||
auto It = std::lower_bound(
|
||||
Locations.begin(), Locations.end(), Offset,
|
||||
[](const LocationList &L, uint64_t Offset) { return L.Offset < Offset; });
|
||||
if (It != Locations.end() && It->Offset == Offset)
|
||||
return &(*It);
|
||||
|
|
|
@ -62,8 +62,9 @@ static bool IsInRanges(const IntRange &R,
|
|||
// Find the first range whose High field is >= R.High,
|
||||
// then check if the Low field is <= R.Low. If so, we
|
||||
// have a Range that covers R.
|
||||
auto I = llvm::lower_bound(
|
||||
Ranges, R, [](IntRange A, IntRange B) { return A.High < B.High; });
|
||||
auto I = std::lower_bound(
|
||||
Ranges.begin(), Ranges.end(), R,
|
||||
[](const IntRange &A, const IntRange &B) { return A.High < B.High; });
|
||||
return I != Ranges.end() && I->Low <= R.Low;
|
||||
}
|
||||
|
||||
|
|
|
@ -487,10 +487,11 @@ static bool promoteSingleBlockAlloca(AllocaInst *AI, const AllocaInfo &Info,
|
|||
unsigned LoadIdx = LBI.getInstructionIndex(LI);
|
||||
|
||||
// Find the nearest store that has a lower index than this load.
|
||||
StoresByIndexTy::iterator I = llvm::lower_bound(
|
||||
StoresByIndex,
|
||||
std::make_pair(LoadIdx, static_cast<StoreInst *>(nullptr)),
|
||||
less_first());
|
||||
StoresByIndexTy::iterator I =
|
||||
std::lower_bound(StoresByIndex.begin(), StoresByIndex.end(),
|
||||
std::make_pair(LoadIdx,
|
||||
static_cast<StoreInst *>(nullptr)),
|
||||
less_first());
|
||||
if (I == StoresByIndex.begin()) {
|
||||
if (StoresByIndex.empty())
|
||||
// If there are no stores, the load takes the undef value.
|
||||
|
@ -757,8 +758,9 @@ void PromoteMem2Reg::run() {
|
|||
// them from the Preds list.
|
||||
for (unsigned i = 0, e = SomePHI->getNumIncomingValues(); i != e; ++i) {
|
||||
// Do a log(n) search of the Preds list for the entry we want.
|
||||
SmallVectorImpl<BasicBlock *>::iterator EntIt = llvm::lower_bound(
|
||||
Preds, SomePHI->getIncomingBlock(i), CompareBBNumbers);
|
||||
SmallVectorImpl<BasicBlock *>::iterator EntIt = std::lower_bound(
|
||||
Preds.begin(), Preds.end(), SomePHI->getIncomingBlock(i),
|
||||
CompareBBNumbers);
|
||||
assert(EntIt != Preds.end() && *EntIt == SomePHI->getIncomingBlock(i) &&
|
||||
"PHI node has entry for a block which is not a predecessor!");
|
||||
|
||||
|
|
|
@ -21,6 +21,7 @@
|
|||
static constexpr const char kIntegerPrefix[] = "i_0x";
|
||||
static constexpr const char kDoublePrefix[] = "f_";
|
||||
static constexpr const char kInvalidOperand[] = "INVALID";
|
||||
static constexpr llvm::StringLiteral kNoRegister("%noreg");
|
||||
|
||||
namespace llvm {
|
||||
|
||||
|
@ -47,7 +48,9 @@ struct YamlContext {
|
|||
llvm::StringMap<unsigned>
|
||||
generateRegNameToRegNoMapping(const llvm::MCRegisterInfo &RegInfo) {
|
||||
llvm::StringMap<unsigned> Map(RegInfo.getNumRegs());
|
||||
for (unsigned I = 0, E = RegInfo.getNumRegs(); I < E; ++I)
|
||||
// Special-case RegNo 0, which would otherwise be spelled as ''.
|
||||
Map[kNoRegister] = 0;
|
||||
for (unsigned I = 1, E = RegInfo.getNumRegs(); I < E; ++I)
|
||||
Map[RegInfo.getName(I)] = I;
|
||||
assert(Map.size() == RegInfo.getNumRegs() && "Size prediction failed");
|
||||
return Map;
|
||||
|
@ -83,18 +86,21 @@ struct YamlContext {
|
|||
llvm::raw_string_ostream &getErrorStream() { return ErrorStream; }
|
||||
|
||||
llvm::StringRef getRegName(unsigned RegNo) {
|
||||
// Special case: RegNo 0 is NoRegister. We have to deal with it explicitly.
|
||||
if (RegNo == 0)
|
||||
return kNoRegister;
|
||||
const llvm::StringRef RegName = State->getRegInfo().getName(RegNo);
|
||||
if (RegName.empty())
|
||||
ErrorStream << "No register with enum value '" << RegNo << "'\n";
|
||||
return RegName;
|
||||
}
|
||||
|
||||
unsigned getRegNo(llvm::StringRef RegName) {
|
||||
llvm::Optional<unsigned> getRegNo(llvm::StringRef RegName) {
|
||||
auto Iter = RegNameToRegNo.find(RegName);
|
||||
if (Iter != RegNameToRegNo.end())
|
||||
return Iter->second;
|
||||
ErrorStream << "No register with name '" << RegName << "'\n";
|
||||
return 0;
|
||||
return llvm::None;
|
||||
}
|
||||
|
||||
private:
|
||||
|
@ -142,8 +148,8 @@ private:
|
|||
return llvm::MCOperand::createImm(IntValue);
|
||||
if (tryDeserializeFPOperand(String, DoubleValue))
|
||||
return llvm::MCOperand::createFPImm(DoubleValue);
|
||||
if (unsigned RegNo = getRegNo(String))
|
||||
return llvm::MCOperand::createReg(RegNo);
|
||||
if (auto RegNo = getRegNo(String))
|
||||
return llvm::MCOperand::createReg(*RegNo);
|
||||
if (String != kInvalidOperand)
|
||||
ErrorStream << "Unknown Operand: '" << String << "'\n";
|
||||
return {};
|
||||
|
@ -258,8 +264,9 @@ template <> struct ScalarTraits<exegesis::RegisterValue> {
|
|||
String.split(Pieces, "=0x", /* MaxSplit */ -1,
|
||||
/* KeepEmpty */ false);
|
||||
YamlContext &Context = getTypedContext(Ctx);
|
||||
if (Pieces.size() == 2) {
|
||||
RV.Register = Context.getRegNo(Pieces[0]);
|
||||
llvm::Optional<unsigned> RegNo;
|
||||
if (Pieces.size() == 2 && (RegNo = Context.getRegNo(Pieces[0]))) {
|
||||
RV.Register = *RegNo;
|
||||
const unsigned BitsNeeded = llvm::APInt::getBitsNeeded(Pieces[1], kRadix);
|
||||
RV.Value = llvm::APInt(BitsNeeded, Pieces[1], kRadix);
|
||||
} else {
|
||||
|
|
Loading…
Reference in New Issue