Skip DBG_VALUE in many places in live intervals and register coalescing.
This fixes many crashes and places where debug info affects codegen (when
dbg.value is lowered to machine instructions, which it isn't yet in TOT).

llvm-svn: 95739
Dale Johannesen 2010-02-10 00:55:42 +00:00
parent 74e6852510
commit f8f9f55468
2 changed files with 57 additions and 38 deletions
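
The diffs below mostly apply one recurring pattern: any loop that walks machine instructions to compute liveness or to count uses has to step over DBG_VALUE, otherwise building with -g changes the computed intervals and therefore codegen. The following is a minimal standalone sketch of that pattern, not part of the patch; the helper name countRealUses and its signature are illustrative assumptions, while MachineInstr::isDebugValue() and readsRegister() are existing queries, with header paths as of the tree this commit targets.

#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

// Hypothetical helper: count the real uses of Reg in a block while skipping
// DBG_VALUE, so the result is the same with and without debug info.
static unsigned countRealUses(const MachineBasicBlock &MBB, unsigned Reg,
                              const TargetRegisterInfo *TRI) {
  unsigned Count = 0;
  for (MachineBasicBlock::const_iterator MI = MBB.begin(), E = MBB.end();
       MI != E; ++MI) {
    if (MI->isDebugValue())
      continue;                        // DBG_VALUE never counts as a use
    if (MI->readsRegister(Reg, TRI))
      ++Count;
  }
  return Count;
}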

lib/CodeGen/LiveIntervalAnalysis.cpp

@@ -512,6 +512,8 @@ void LiveIntervals::handlePhysicalRegisterDef(MachineBasicBlock *MBB,
   baseIndex = baseIndex.getNextIndex();
   while (++mi != MBB->end()) {
+    if (mi->isDebugValue())
+      continue;
     if (getInstructionFromIndex(baseIndex) == 0)
       baseIndex = indexes_->getNextNonNullIndex(baseIndex);
@@ -527,8 +529,8 @@ void LiveIntervals::handlePhysicalRegisterDef(MachineBasicBlock *MBB,
       end = baseIndex.getDefIndex();
     } else {
       // Another instruction redefines the register before it is ever read.
-      // Then the register is essentially dead at the instruction that defines
-      // it. Hence its interval is:
+      // Then the register is essentially dead at the instruction that
+      // defines it. Hence its interval is:
       // [defSlot(def), defSlot(def)+1)
       DEBUG(dbgs() << " dead");
       end = start.getStoreIndex();
@@ -607,25 +609,27 @@ void LiveIntervals::handleLiveInRegister(MachineBasicBlock *MBB,
   SlotIndex end = baseIndex;
   bool SeenDefUse = false;
-  while (mi != MBB->end()) {
-    if (mi->killsRegister(interval.reg, tri_)) {
-      DEBUG(dbgs() << " killed");
-      end = baseIndex.getDefIndex();
-      SeenDefUse = true;
-      break;
-    } else if (mi->modifiesRegister(interval.reg, tri_)) {
-      // Another instruction redefines the register before it is ever read.
-      // Then the register is essentially dead at the instruction that defines
-      // it. Hence its interval is:
-      // [defSlot(def), defSlot(def)+1)
-      DEBUG(dbgs() << " dead");
-      end = start.getStoreIndex();
-      SeenDefUse = true;
-      break;
+  MachineBasicBlock::iterator E = MBB->end();
+  while (mi != E) {
+    if (!mi->isDebugValue()) {
+      if (mi->killsRegister(interval.reg, tri_)) {
+        DEBUG(dbgs() << " killed");
+        end = baseIndex.getDefIndex();
+        SeenDefUse = true;
+        break;
+      } else if (mi->modifiesRegister(interval.reg, tri_)) {
+        // Another instruction redefines the register before it is ever read.
+        // Then the register is essentially dead at the instruction that defines
+        // it. Hence its interval is:
+        // [defSlot(def), defSlot(def)+1)
+        DEBUG(dbgs() << " dead");
+        end = start.getStoreIndex();
+        SeenDefUse = true;
+        break;
+      }
     }
     ++mi;
-    if (mi != MBB->end()) {
+    if (mi != E && !mi->isDebugValue()) {
       baseIndex = indexes_->getNextNonNullIndex(baseIndex);
     }
   }
@@ -1056,7 +1060,7 @@ rewriteInstructionForSpills(const LiveInterval &li, const VNInfo *VNI,
     // If this is the rematerializable definition MI itself and
     // all of its uses are rematerialized, simply delete it.
     if (MI == ReMatOrigDefMI && CanDelete) {
-      DEBUG(dbgs() << "\t\t\t\tErasing re-materlizable def: "
+      DEBUG(dbgs() << "\t\t\t\tErasing re-materializable def: "
                    << MI << '\n');
       RemoveMachineInstrFromMaps(MI);
       vrm.RemoveMachineInstrFromMaps(MI);
@@ -1299,6 +1303,12 @@ rewriteInstructionsForSpills(const LiveInterval &li, bool TrySplit,
     MachineInstr *MI = &*ri;
     MachineOperand &O = ri.getOperand();
     ++ri;
+    if (MI->isDebugValue()) {
+      // Remove debug info for now.
+      O.setReg(0U);
+      DEBUG(dbgs() << "Removing debug info due to spill:" << "\t" << *MI);
+      continue;
+    }
     assert(!O.isImplicit() && "Spilling register that's used as implicit use?");
     SlotIndex index = getInstructionIndex(MI);
     if (index < start || index >= end)

lib/CodeGen/SimpleRegisterCoalescing.cpp

@@ -375,8 +375,9 @@ bool SimpleRegisterCoalescing::RemoveCopyByCommutingDef(LiveInterval &IntA,
   // If some of the uses of IntA.reg is already coalesced away, return false.
   // It's not possible to determine whether it's safe to perform the coalescing.
-  for (MachineRegisterInfo::use_iterator UI = mri_->use_begin(IntA.reg),
-         UE = mri_->use_end(); UI != UE; ++UI) {
+  for (MachineRegisterInfo::use_nodbg_iterator UI =
+         mri_->use_nodbg_begin(IntA.reg),
+         UE = mri_->use_nodbg_end(); UI != UE; ++UI) {
     MachineInstr *UseMI = &*UI;
     SlotIndex UseIdx = li_->getInstructionIndex(UseMI);
     LiveInterval::iterator ULR = IntA.FindLiveRangeContaining(UseIdx);
@@ -430,6 +431,12 @@ bool SimpleRegisterCoalescing::RemoveCopyByCommutingDef(LiveInterval &IntA,
     ++UI;
     if (JoinedCopies.count(UseMI))
       continue;
+    if (UseMI->isDebugValue()) {
+      // FIXME These don't have an instruction index. Not clear we have enough
+      // info to decide whether to do this replacement or not. For now do it.
+      UseMO.setReg(NewReg);
+      continue;
+    }
     SlotIndex UseIdx = li_->getInstructionIndex(UseMI).getUseIndex();
     LiveInterval::iterator ULR = IntA.FindLiveRangeContaining(UseIdx);
     if (ULR == IntA.end() || ULR->valno != AValNo)
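
Both DBG_VALUE rewrites in this commit (the spill path in LiveIntervalAnalysis.cpp above and the commuting path in the hunk just above) reduce to the same move: rewrite or clear the DBG_VALUE's register operand and skip all slot-index bookkeeping, since DBG_VALUE carries no instruction index. A hedged sketch of that shared shape follows; updateDebugValueReg is a hypothetical helper, not code from the patch, and clearing to register 0 discards the location the same way the spill hunk's setReg(0U) does.

#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include <cassert>
using namespace llvm;

// Hypothetical helper: retarget (NewReg != 0) or drop (NewReg == 0) the
// location described by a DBG_VALUE, mirroring the inline code in the hunks.
static void updateDebugValueReg(MachineInstr *MI, unsigned OldReg,
                                unsigned NewReg) {
  assert(MI->isDebugValue() && "expected a DBG_VALUE");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (MO.isReg() && MO.getReg() == OldReg)
      MO.setReg(NewReg);   // NewReg == 0 removes the debug location entirely
  }
}
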
@@ -1029,8 +1036,9 @@ SimpleRegisterCoalescing::isWinToJoinVRWithSrcPhysReg(MachineInstr *CopyMI,
   unsigned Threshold = allocatableRCRegs_[RC].count() * 2;
   unsigned Length = li_->getApproximateInstructionCount(DstInt);
   if (Length > Threshold &&
-      (((float)std::distance(mri_->use_begin(DstInt.reg),
-                             mri_->use_end()) / Length) < (1.0 / Threshold)))
+      (((float)std::distance(mri_->use_nodbg_begin(DstInt.reg),
+                             mri_->use_nodbg_end()) / Length) <
+        (1.0 / Threshold)))
     return false;
 
   // If the virtual register live interval extends into a loop, turn down
@@ -1079,15 +1087,16 @@ SimpleRegisterCoalescing::isWinToJoinVRWithDstPhysReg(MachineInstr *CopyMI,
                                                       MachineBasicBlock *CopyMBB,
                                                       LiveInterval &DstInt,
                                                       LiveInterval &SrcInt) {
-  // If the virtual register live interval is long but it has low use desity,
+  // If the virtual register live interval is long but it has low use density,
   // do not join them, instead mark the physical register as its allocation
   // preference.
   const TargetRegisterClass *RC = mri_->getRegClass(SrcInt.reg);
   unsigned Threshold = allocatableRCRegs_[RC].count() * 2;
   unsigned Length = li_->getApproximateInstructionCount(SrcInt);
   if (Length > Threshold &&
-      (((float)std::distance(mri_->use_begin(SrcInt.reg),
-                             mri_->use_end()) / Length) < (1.0 / Threshold)))
+      (((float)std::distance(mri_->use_nodbg_begin(SrcInt.reg),
+                             mri_->use_nodbg_end()) / Length) <
+        (1.0 / Threshold)))
     return false;
 
   if (SrcInt.empty())
@@ -1140,10 +1149,10 @@ SimpleRegisterCoalescing::isWinToJoinCrossClass(unsigned LargeReg,
   unsigned LargeSize = li_->getApproximateInstructionCount(LargeInt);
   unsigned SmallSize = li_->getApproximateInstructionCount(SmallInt);
   if (SmallSize > Threshold || LargeSize > Threshold)
-    if ((float)std::distance(mri_->use_begin(SmallReg),
-                             mri_->use_end()) / SmallSize <
-        (float)std::distance(mri_->use_begin(LargeReg),
-                             mri_->use_end()) / LargeSize)
+    if ((float)std::distance(mri_->use_nodbg_begin(SmallReg),
+                             mri_->use_nodbg_end()) / SmallSize <
+        (float)std::distance(mri_->use_nodbg_begin(LargeReg),
+                             mri_->use_nodbg_end()) / LargeSize)
       return false;
   return true;
 }
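
The heuristic changes in isWinToJoinVRWithSrcPhysReg, isWinToJoinVRWithDstPhysReg, and isWinToJoinCrossClass above (and in JoinCopy below) all compute the same use-density ratio; counting through use_nodbg_begin()/use_nodbg_end() keeps that ratio identical with and without debug info. Here is a rough sketch under a hypothetical helper name, isLowUseDensity, which is not in the patch; the real checks remain inlined as shown.

#include "llvm/CodeGen/MachineRegisterInfo.h"
#include <iterator>
using namespace llvm;

// Hypothetical helper: the shared low-density test. A long interval with few
// real (non-debug) uses is a poor coalescing candidate.
static bool isLowUseDensity(MachineRegisterInfo *MRI, unsigned Reg,
                            unsigned Length, unsigned Threshold) {
  if (Length <= Threshold)
    return false;
  float Uses = (float)std::distance(MRI->use_nodbg_begin(Reg),
                                    MRI->use_nodbg_end());
  return (Uses / Length) < (1.0f / Threshold);
}
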
@@ -1630,8 +1639,8 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) {
       unsigned Length = li_->getApproximateInstructionCount(JoinVInt);
       float Ratio = 1.0 / Threshold;
       if (Length > Threshold &&
-          (((float)std::distance(mri_->use_begin(JoinVReg),
-                                 mri_->use_end()) / Length) < Ratio)) {
+          (((float)std::distance(mri_->use_nodbg_begin(JoinVReg),
+                                 mri_->use_nodbg_end()) / Length) < Ratio)) {
         mri_->setRegAllocationHint(JoinVInt.reg, 0, JoinPReg);
         ++numAborts;
         DEBUG(dbgs() << "\tMay tie down a physical register, abort!\n");
@@ -2564,8 +2573,8 @@ SimpleRegisterCoalescing::differingRegisterClasses(unsigned RegA,
   return !RegClassA->contains(RegB);
 }
 
-/// lastRegisterUse - Returns the last use of the specific register between
-/// cycles Start and End or NULL if there are no uses.
+/// lastRegisterUse - Returns the last (non-debug) use of the specific register
+/// between cycles Start and End or NULL if there are no uses.
 MachineOperand *
 SimpleRegisterCoalescing::lastRegisterUse(SlotIndex Start,
                                           SlotIndex End,
@@ -2574,8 +2583,8 @@ SimpleRegisterCoalescing::lastRegisterUse(SlotIndex Start,
   UseIdx = SlotIndex();
   if (TargetRegisterInfo::isVirtualRegister(Reg)) {
     MachineOperand *LastUse = NULL;
-    for (MachineRegisterInfo::use_iterator I = mri_->use_begin(Reg),
-           E = mri_->use_end(); I != E; ++I) {
+    for (MachineRegisterInfo::use_nodbg_iterator I = mri_->use_nodbg_begin(Reg),
+           E = mri_->use_nodbg_end(); I != E; ++I) {
       MachineOperand &Use = I.getOperand();
       MachineInstr *UseMI = Use.getParent();
       unsigned SrcReg, DstReg, SrcSubIdx, DstSubIdx;