forked from OSchip/llvm-project
Refactor: Simplify boolean conditional return statements in lib/CodeGen.
Patch by Richard. llvm-svn: 251213
This commit is contained in:
parent f82ed2a28c
commit 84921b9860
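The whole patch applies one mechanical pattern: a conditional whose only purpose is to select between `return true` and `return false` is collapsed into a single return of the condition itself (negated when the guarded branch returns `false`). A minimal sketch of the pattern, using a purely hypothetical helper `isSmall` that is not part of the patch:

// Before: boolean conditional return (illustrative only, not from the patch).
bool isSmallOld(int N) {
  if (N < 16)
    return true;
  return false;
}

// After: return the condition directly; behavior is unchanged.
bool isSmallNew(int N) {
  return N < 16;
}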
@@ -620,11 +620,8 @@ ProfitableToMerge(MachineBasicBlock *MBB1, MachineBasicBlock *MBB2,
   // branch instruction, which is likely to be smaller than the 2
   // instructions that would be deleted in the merge.
   MachineFunction *MF = MBB1->getParent();
-  if (EffectiveTailLen >= 2 && MF->getFunction()->optForSize() &&
-      (I1 == MBB1->begin() || I2 == MBB2->begin()))
-    return true;
-
-  return false;
+  return EffectiveTailLen >= 2 && MF->getFunction()->optForSize() &&
+         (I1 == MBB1->begin() || I2 == MBB2->begin());
 }
 
 /// ComputeSameTails - Look through all the blocks in MergePotentials that have
@@ -2361,9 +2361,7 @@ class TypePromotionHelper {
   /// \brief Utility function to determine if \p OpIdx should be promoted when
   /// promoting \p Inst.
   static bool shouldExtOperand(const Instruction *Inst, int OpIdx) {
-    if (isa<SelectInst>(Inst) && OpIdx == 0)
-      return false;
-    return true;
+    return !(isa<SelectInst>(Inst) && OpIdx == 0);
   }
 
   /// \brief Utility function to promote the operand of \p Ext when this
@@ -2500,10 +2498,8 @@ bool TypePromotionHelper::canGetThrough(const Instruction *Inst,
     return false;
 
   // #2 check that the truncate just drops extended bits.
-  if (Inst->getType()->getIntegerBitWidth() >= OpndType->getIntegerBitWidth())
-    return true;
-
-  return false;
+  return Inst->getType()->getIntegerBitWidth() >=
+         OpndType->getIntegerBitWidth();
 }
 
 TypePromotionHelper::Action TypePromotionHelper::getAction(
@@ -3913,11 +3909,8 @@ static bool sinkSelectOperand(const TargetTransformInfo *TTI, Value *V) {
   auto *I = dyn_cast<Instruction>(V);
   // If it's safe to speculatively execute, then it should not have side
   // effects; therefore, it's safe to sink and possibly *not* execute.
-  if (I && I->hasOneUse() && isSafeToSpeculativelyExecute(I) &&
-      TTI->getUserCost(I) >= TargetTransformInfo::TCC_Expensive)
-    return true;
-
-  return false;
+  return I && I->hasOneUse() && isSafeToSpeculativelyExecute(I) &&
+         TTI->getUserCost(I) >= TargetTransformInfo::TCC_Expensive;
 }
 
 /// Returns true if a SelectInst should be turned into an explicit branch.
@@ -91,9 +91,7 @@ public:
   bool dominates(MachineBasicBlock *MBB) {
     if (LBlocks.empty())
       LS.getMachineBasicBlocks(DL, LBlocks);
-    if (LBlocks.count(MBB) != 0 || LS.dominates(DL, MBB))
-      return true;
-    return false;
+    return LBlocks.count(MBB) != 0 || LS.dominates(DL, MBB);
   }
 };
 } // end anonymous namespace
@@ -468,11 +468,8 @@ static bool isNoReturnDef(const MachineOperand &MO) {
   if (MF.getFunction()->hasFnAttribute(Attribute::UWTable))
     return false;
   const Function *Called = getCalledFunction(MI);
-  if (Called == nullptr || !Called->hasFnAttribute(Attribute::NoReturn)
-      || !Called->hasFnAttribute(Attribute::NoUnwind))
-    return false;
-
-  return true;
+  return !(Called == nullptr || !Called->hasFnAttribute(Attribute::NoReturn) ||
+           !Called->hasFnAttribute(Attribute::NoUnwind));
 }
 
 bool MachineRegisterInfo::isPhysRegModified(unsigned PhysReg) const {
@@ -686,10 +686,7 @@ bool PeepholeOptimizer::findNextSource(unsigned Reg, unsigned SubReg,
   }
 
   // If we did not find a more suitable source, there is nothing to optimize.
-  if (CurSrcPair.Reg == Reg)
-    return false;
-
-  return true;
+  return CurSrcPair.Reg != Reg;
 }
 
 /// \brief Insert a PHI instruction with incoming edges \p SrcRegs that are
@@ -50,9 +50,7 @@ bool PseudoSourceValue::isAliased(const MachineFrameInfo *) const {
 }
 
 bool PseudoSourceValue::mayAlias(const MachineFrameInfo *) const {
-  if (isGOT() || isConstantPool() || isJumpTable())
-    return false;
-  return true;
+  return !(isGOT() || isConstantPool() || isJumpTable());
 }
 
 bool FixedStackPseudoSourceValue::isConstant(
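When the guarded branch returns `false`, as in `mayAlias` above, the rewrite wraps the original condition in a single `!(...)` rather than distributing the negation. A hedged illustration of the equivalence (the free function below is hypothetical, not part of the patch):

// The two returns are equivalent by De Morgan's law; the patch keeps the
// !(...) form, which preserves the original condition verbatim.
bool mayAliasExample(bool IsGOT, bool IsConstantPool, bool IsJumpTable) {
  return !(IsGOT || IsConstantPool || IsJumpTable);
  // Equivalently: return !IsGOT && !IsConstantPool && !IsJumpTable;
}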
@@ -462,11 +462,9 @@ void ScheduleDAGInstrs::addVRegUseDeps(SUnit *SU, unsigned OperIdx) {
 /// Return true if MI is an instruction we are unable to reason about
 /// (like a call or something with unmodeled side effects).
 static inline bool isGlobalMemoryObject(AliasAnalysis *AA, MachineInstr *MI) {
-  if (MI->isCall() || MI->hasUnmodeledSideEffects() ||
-      (MI->hasOrderedMemoryRef() &&
-       (!MI->mayLoad() || !MI->isInvariantLoad(AA))))
-    return true;
-  return false;
+  return MI->isCall() || MI->hasUnmodeledSideEffects() ||
+         (MI->hasOrderedMemoryRef() &&
+          (!MI->mayLoad() || !MI->isInvariantLoad(AA)));
 }
 
 // This MI might have either incomplete info, or known to be unsafe
@@ -465,10 +465,7 @@ bool StackProtector::InsertStackProtectors() {
 
   // Return if we didn't modify any basic blocks. i.e., there are no return
   // statements in the function.
-  if (!HasPrologue)
-    return false;
-
-  return true;
+  return HasPrologue;
 }
 
 /// CreateFailBB - Create a basic block to jump to when the stack protector
@@ -576,10 +576,7 @@ bool TargetInstrInfo::hasReassociableOperands(
   MI2 = MRI.getUniqueVRegDef(Op2.getReg());
 
   // And they need to be in the trace (otherwise, they won't have a depth).
-  if (MI1 && MI2 && MI1->getParent() == MBB && MI2->getParent() == MBB)
-    return true;
-
-  return false;
+  return MI1 && MI2 && MI1->getParent() == MBB && MI2->getParent() == MBB;
 }
 
 bool TargetInstrInfo::hasReassociableSibling(const MachineInstr &Inst,
@@ -600,11 +597,9 @@ bool TargetInstrInfo::hasReassociableSibling(const MachineInstr &Inst,
   // 2. The previous instruction must have virtual register definitions for its
   //    operands in the same basic block as Inst.
   // 3. The previous instruction's result must only be used by Inst.
-  if (MI1->getOpcode() == AssocOpcode && hasReassociableOperands(*MI1, MBB) &&
-      MRI.hasOneNonDBGUse(MI1->getOperand(0).getReg()))
-    return true;
-
-  return false;
+  return MI1->getOpcode() == AssocOpcode &&
+         hasReassociableOperands(*MI1, MBB) &&
+         MRI.hasOneNonDBGUse(MI1->getOperand(0).getReg());
 }
 
 // 1. The operation must be associative and commutative.
@@ -613,12 +608,9 @@ bool TargetInstrInfo::hasReassociableSibling(const MachineInstr &Inst,
 // 3. The instruction must have a reassociable sibling.
 bool TargetInstrInfo::isReassociationCandidate(const MachineInstr &Inst,
                                                bool &Commuted) const {
-  if (isAssociativeAndCommutative(Inst) &&
-      hasReassociableOperands(Inst, Inst.getParent()) &&
-      hasReassociableSibling(Inst, Commuted))
-    return true;
-
-  return false;
+  return isAssociativeAndCommutative(Inst) &&
+         hasReassociableOperands(Inst, Inst.getParent()) &&
+         hasReassociableSibling(Inst, Commuted);
 }
 
 // The concept of the reassociation pass is that these operations can benefit
@@ -940,10 +932,7 @@ bool TargetInstrInfo::isSchedulingBoundary(const MachineInstr *MI,
   // modification.
   const TargetLowering &TLI = *MF.getSubtarget().getTargetLowering();
   const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
-  if (MI->modifiesRegister(TLI.getStackPointerRegisterToSaveRestore(), TRI))
-    return true;
-
-  return false;
+  return MI->modifiesRegister(TLI.getStackPointerRegisterToSaveRestore(), TRI);
 }
 
 // Provide a global flag for disabling the PreRA hazard recognizer that targets