Revert r334729 "[DAG] Avoid needing to walk out legalization tables. NFCI."
This reverts commit r334729.

llvm-svn: 334869
commit dc705a6a89 (parent 1c9df30eca)
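For context on the machinery this revert restores: ReplacedValues maps each value that has been replaced to its replacement, and RemapValue follows such chains with path compression so later lookups resolve in a single step. The sketch below is illustrative only, not LLVM code; a plain std::unordered_map with int keys stands in for the DenseMap<SDValue, SDValue> table that appears in the diff that follows.

// Illustrative sketch only: ints stand in for SDValues and a plain map for the
// ReplacedValues table; it mirrors the path compression done by RemapValue.
#include <cassert>
#include <unordered_map>

static std::unordered_map<int, int> ReplacedValues;

// Follow the replacement chain for V, then shortcut every link on it so the
// next lookup resolves in one hop.
static void RemapValue(int &V) {
  auto I = ReplacedValues.find(V);
  if (I != ReplacedValues.end()) {
    RemapValue(I->second); // resolve the tail of the chain first
    V = I->second;         // ...then jump straight to the final value
  }
}

int main() {
  ReplacedValues[1] = 2; // value 1 was replaced by 2
  ReplacedValues[2] = 3; // ...and 2 was later replaced by 3
  int V = 1;
  RemapValue(V);
  assert(V == 3);                 // chains resolve to the final replacement
  assert(ReplacedValues[1] == 3); // and the chain itself has been compressed
  return 0;
}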
--- a/llvm/lib/CodeGen/SelectionDAG/LegalizeTypes.cpp
+++ b/llvm/lib/CodeGen/SelectionDAG/LegalizeTypes.cpp
@@ -84,10 +84,9 @@ void DAGTypeLegalizer::PerformExpensiveChecks() {
       SDValue Res(&Node, i);
       EVT VT = Res.getValueType();
       bool Failed = false;
-      auto ResId = getTableId(Res);
 
       unsigned Mapped = 0;
-      if (ReplacedValues.find(ResId) != ReplacedValues.end()) {
+      if (ReplacedValues.find(Res) != ReplacedValues.end()) {
         Mapped |= 1;
         // Check that remapped values are only used by nodes marked NewNode.
         for (SDNode::use_iterator UI = Node.use_begin(), UE = Node.use_end();
@@ -98,31 +97,30 @@ void DAGTypeLegalizer::PerformExpensiveChecks() {
 
         // Check that the final result of applying ReplacedValues is not
         // marked NewNode.
-        auto NewValId = ReplacedValues[ResId];
-        auto I = ReplacedValues.find(NewValId);
+        SDValue NewVal = ReplacedValues[Res];
+        DenseMap<SDValue, SDValue>::iterator I = ReplacedValues.find(NewVal);
         while (I != ReplacedValues.end()) {
-          NewValId = I->second;
-          I = ReplacedValues.find(NewValId);
+          NewVal = I->second;
+          I = ReplacedValues.find(NewVal);
         }
-        SDValue NewVal = getSDValue(NewValId);
         assert(NewVal.getNode()->getNodeId() != NewNode &&
                "ReplacedValues maps to a new node!");
       }
-      if (PromotedIntegers.find(ResId) != PromotedIntegers.end())
+      if (PromotedIntegers.find(Res) != PromotedIntegers.end())
         Mapped |= 2;
-      if (SoftenedFloats.find(ResId) != SoftenedFloats.end())
+      if (SoftenedFloats.find(Res) != SoftenedFloats.end())
         Mapped |= 4;
-      if (ScalarizedVectors.find(ResId) != ScalarizedVectors.end())
+      if (ScalarizedVectors.find(Res) != ScalarizedVectors.end())
         Mapped |= 8;
-      if (ExpandedIntegers.find(ResId) != ExpandedIntegers.end())
+      if (ExpandedIntegers.find(Res) != ExpandedIntegers.end())
         Mapped |= 16;
-      if (ExpandedFloats.find(ResId) != ExpandedFloats.end())
+      if (ExpandedFloats.find(Res) != ExpandedFloats.end())
         Mapped |= 32;
-      if (SplitVectors.find(ResId) != SplitVectors.end())
+      if (SplitVectors.find(Res) != SplitVectors.end())
         Mapped |= 64;
-      if (WidenedVectors.find(ResId) != WidenedVectors.end())
+      if (WidenedVectors.find(Res) != WidenedVectors.end())
         Mapped |= 128;
-      if (PromotedFloats.find(ResId) != PromotedFloats.end())
+      if (PromotedFloats.find(Res) != PromotedFloats.end())
         Mapped |= 256;
 
       if (Node.getNodeId() != Processed) {
@@ -493,6 +491,9 @@ SDNode *DAGTypeLegalizer::AnalyzeNewNode(SDNode *N) {
   if (N->getNodeId() != NewNode && N->getNodeId() != Unanalyzed)
     return N;
 
+  // Remove any stale map entries.
+  ExpungeNode(N);
+
   // Okay, we know that this node is new. Recursively walk all of its operands
   // to see if they are new also. The depth of this walk is bounded by the size
   // of the new tree that was constructed (usually 2-3 nodes), so we don't worry
@@ -543,6 +544,7 @@ SDNode *DAGTypeLegalizer::AnalyzeNewNode(SDNode *N) {
         // to remap the operands, since they are the same as the operands we
         // remapped above.
         N = M;
+        ExpungeNode(N);
       }
     }
 
@@ -563,24 +565,106 @@ void DAGTypeLegalizer::AnalyzeNewValue(SDValue &Val) {
   RemapValue(Val);
 }
 
-/// If the specified value was already legalized to another value,
-/// replace it by that value.
-void DAGTypeLegalizer::RemapValue(SDValue &V) {
-  auto Id = getTableId(V);
-  V = getSDValue(Id);
+/// If N has a bogus mapping in ReplacedValues, eliminate it.
+/// This can occur when a node is deleted then reallocated as a new node -
+/// the mapping in ReplacedValues applies to the deleted node, not the new
+/// one.
+/// The only map that can have a deleted node as a source is ReplacedValues.
+/// Other maps can have deleted nodes as targets, but since their looked-up
+/// values are always immediately remapped using RemapValue, resulting in a
+/// not-deleted node, this is harmless as long as ReplacedValues/RemapValue
+/// always performs correct mappings. In order to keep the mapping correct,
+/// ExpungeNode should be called on any new nodes *before* adding them as
+/// either source or target to ReplacedValues (which typically means calling
+/// Expunge when a new node is first seen, since it may no longer be marked
+/// NewNode by the time it is added to ReplacedValues).
+void DAGTypeLegalizer::ExpungeNode(SDNode *N) {
+  if (N->getNodeId() != NewNode)
+    return;
+
+  // If N is not remapped by ReplacedValues then there is nothing to do.
+  unsigned i, e;
+  for (i = 0, e = N->getNumValues(); i != e; ++i)
+    if (ReplacedValues.find(SDValue(N, i)) != ReplacedValues.end())
+      break;
+
+  if (i == e)
+    return;
+
+  // Remove N from all maps - this is expensive but rare.
+
+  for (DenseMap<SDValue, SDValue>::iterator I = PromotedIntegers.begin(),
+       E = PromotedIntegers.end(); I != E; ++I) {
+    assert(I->first.getNode() != N);
+    RemapValue(I->second);
+  }
+
+  for (DenseMap<SDValue, SDValue>::iterator I = PromotedFloats.begin(),
+       E = PromotedFloats.end(); I != E; ++I) {
+    assert(I->first.getNode() != N);
+    RemapValue(I->second);
+  }
+
+  for (DenseMap<SDValue, SDValue>::iterator I = SoftenedFloats.begin(),
+       E = SoftenedFloats.end(); I != E; ++I) {
+    assert(I->first.getNode() != N);
+    RemapValue(I->second);
+  }
+
+  for (DenseMap<SDValue, SDValue>::iterator I = ScalarizedVectors.begin(),
+       E = ScalarizedVectors.end(); I != E; ++I) {
+    assert(I->first.getNode() != N);
+    RemapValue(I->second);
+  }
+
+  for (DenseMap<SDValue, SDValue>::iterator I = WidenedVectors.begin(),
+       E = WidenedVectors.end(); I != E; ++I) {
+    assert(I->first.getNode() != N);
+    RemapValue(I->second);
+  }
+
+  for (DenseMap<SDValue, std::pair<SDValue, SDValue> >::iterator
+       I = ExpandedIntegers.begin(), E = ExpandedIntegers.end(); I != E; ++I){
+    assert(I->first.getNode() != N);
+    RemapValue(I->second.first);
+    RemapValue(I->second.second);
+  }
+
+  for (DenseMap<SDValue, std::pair<SDValue, SDValue> >::iterator
+       I = ExpandedFloats.begin(), E = ExpandedFloats.end(); I != E; ++I) {
+    assert(I->first.getNode() != N);
+    RemapValue(I->second.first);
+    RemapValue(I->second.second);
+  }
+
+  for (DenseMap<SDValue, std::pair<SDValue, SDValue> >::iterator
+       I = SplitVectors.begin(), E = SplitVectors.end(); I != E; ++I) {
+    assert(I->first.getNode() != N);
+    RemapValue(I->second.first);
+    RemapValue(I->second.second);
+  }
+
+  for (DenseMap<SDValue, SDValue>::iterator I = ReplacedValues.begin(),
+       E = ReplacedValues.end(); I != E; ++I)
+    RemapValue(I->second);
+
+  for (unsigned i = 0, e = N->getNumValues(); i != e; ++i)
+    ReplacedValues.erase(SDValue(N, i));
 }
 
-void DAGTypeLegalizer::RemapId(TableId &Id) {
-  auto I = ReplacedValues.find(Id);
+/// If the specified value was already legalized to another value,
+/// replace it by that value.
+void DAGTypeLegalizer::RemapValue(SDValue &N) {
+  DenseMap<SDValue, SDValue>::iterator I = ReplacedValues.find(N);
   if (I != ReplacedValues.end()) {
     // Use path compression to speed up future lookups if values get multiply
     // replaced with other values.
-    RemapId(I->second);
-    Id = I->second;
+    RemapValue(I->second);
+    N = I->second;
 
-    // Note that N = IdToValueMap[Id] it is possible to have
-    // N.getNode()->getNodeId() == NewNode at this point because it is possible
-    // for a node to be put in the map before being processed.
+    // Note that it is possible to have N.getNode()->getNodeId() == NewNode at
+    // this point because it is possible for a node to be put in the map before
+    // being processed.
   }
 }
 
@@ -637,22 +721,20 @@ void DAGTypeLegalizer::ReplaceValueWith(SDValue From, SDValue To) {
   assert(From.getNode() != To.getNode() && "Potential legalization loop!");
 
   // If expansion produced new nodes, make sure they are properly marked.
-  AnalyzeNewValue(To);
+  ExpungeNode(From.getNode());
+  AnalyzeNewValue(To); // Expunges To.
 
   // Anything that used the old node should now use the new one. Note that this
   // can potentially cause recursive merging.
   SmallSetVector<SDNode*, 16> NodesToAnalyze;
   NodeUpdateListener NUL(*this, NodesToAnalyze);
   do {
-
-    // The old node may be present in a map like ExpandedIntegers or
-    // PromotedIntegers. Inform maps about the replacement.
-    auto FromId = getTableId(From);
-    auto ToId = getTableId(To);
-
-    ReplacedValues[FromId] = ToId;
     DAG.ReplaceAllUsesOfValueWith(From, To);
 
+    // The old node may still be present in a map like ExpandedIntegers or
+    // PromotedIntegers. Inform maps about the replacement.
+    ReplacedValues[From] = To;
+
     // Process the list of nodes that need to be reanalyzed.
     while (!NodesToAnalyze.empty()) {
       SDNode *N = NodesToAnalyze.back();
@@ -676,14 +758,12 @@ void DAGTypeLegalizer::ReplaceValueWith(SDValue From, SDValue To) {
           SDValue NewVal(M, i);
           if (M->getNodeId() == Processed)
             RemapValue(NewVal);
+          DAG.ReplaceAllUsesOfValueWith(OldVal, NewVal);
           // OldVal may be a target of the ReplacedValues map which was marked
           // NewNode to force reanalysis because it was updated. Ensure that
           // anything that ReplacedValues mapped to OldVal will now be mapped
           // all the way to NewVal.
-          auto OldValId = getTableId(OldVal);
-          auto NewValId = getTableId(NewVal);
-          DAG.ReplaceAllUsesOfValueWith(OldVal, NewVal);
-          ReplacedValues[OldValId] = NewValId;
+          ReplacedValues[OldVal] = NewVal;
         }
         // The original node continues to exist in the DAG, marked NewNode.
       }
@@ -700,9 +780,9 @@ void DAGTypeLegalizer::SetPromotedInteger(SDValue Op, SDValue Result) {
          "Invalid type for promoted integer");
   AnalyzeNewValue(Result);
 
-  auto &OpIdEntry = PromotedIntegers[getTableId(Op)];
-  assert((OpIdEntry == 0) && "Node is already promoted!");
-  OpIdEntry = getTableId(Result);
+  SDValue &OpEntry = PromotedIntegers[Op];
+  assert(!OpEntry.getNode() && "Node is already promoted!");
+  OpEntry = Result;
 
   DAG.transferDbgValues(Op, Result);
 }
@@ -717,15 +797,15 @@ void DAGTypeLegalizer::SetSoftenedFloat(SDValue Op, SDValue Result) {
          "Invalid type for softened float");
   AnalyzeNewValue(Result);
 
-  auto &OpIdEntry = SoftenedFloats[getTableId(Op)];
+  SDValue &OpEntry = SoftenedFloats[Op];
   // Allow repeated calls to save f128 type nodes
   // or any node with type that transforms to itself.
   // Many operations on these types are not softened.
-  assert(((OpIdEntry == 0) ||
+  assert((!OpEntry.getNode()||
           Op.getValueType() ==
           TLI.getTypeToTransformTo(*DAG.getContext(), Op.getValueType())) &&
          "Node is already converted to integer!");
-  OpIdEntry = getTableId(Result);
+  OpEntry = Result;
 }
 
 void DAGTypeLegalizer::SetPromotedFloat(SDValue Op, SDValue Result) {
@@ -734,9 +814,9 @@ void DAGTypeLegalizer::SetPromotedFloat(SDValue Op, SDValue Result) {
          "Invalid type for promoted float");
   AnalyzeNewValue(Result);
 
-  auto &OpIdEntry = PromotedFloats[getTableId(Op)];
-  assert((OpIdEntry == 0) && "Node is already promoted!");
-  OpIdEntry = getTableId(Result);
+  SDValue &OpEntry = PromotedFloats[Op];
+  assert(!OpEntry.getNode() && "Node is already promoted!");
+  OpEntry = Result;
 }
 
 void DAGTypeLegalizer::SetScalarizedVector(SDValue Op, SDValue Result) {
@@ -747,17 +827,19 @@ void DAGTypeLegalizer::SetScalarizedVector(SDValue Op, SDValue Result) {
          "Invalid type for scalarized vector");
   AnalyzeNewValue(Result);
 
-  auto &OpIdEntry = ScalarizedVectors[getTableId(Op)];
-  assert((OpIdEntry == 0) && "Node is already scalarized!");
-  OpIdEntry = getTableId(Result);
+  SDValue &OpEntry = ScalarizedVectors[Op];
+  assert(!OpEntry.getNode() && "Node is already scalarized!");
+  OpEntry = Result;
 }
 
 void DAGTypeLegalizer::GetExpandedInteger(SDValue Op, SDValue &Lo,
                                           SDValue &Hi) {
-  std::pair<TableId, TableId> &Entry = ExpandedIntegers[getTableId(Op)];
-  assert((Entry.first != 0) && "Operand isn't expanded");
-  Lo = getSDValue(Entry.first);
-  Hi = getSDValue(Entry.second);
+  std::pair<SDValue, SDValue> &Entry = ExpandedIntegers[Op];
+  RemapValue(Entry.first);
+  RemapValue(Entry.second);
+  assert(Entry.first.getNode() && "Operand isn't expanded");
+  Lo = Entry.first;
+  Hi = Entry.second;
 }
 
 void DAGTypeLegalizer::SetExpandedInteger(SDValue Op, SDValue Lo,
@@ -783,18 +865,20 @@ void DAGTypeLegalizer::SetExpandedInteger(SDValue Op, SDValue Lo,
   }
 
   // Remember that this is the result of the node.
-  std::pair<TableId, TableId> &Entry = ExpandedIntegers[getTableId(Op)];
-  assert((Entry.first == 0) && "Node already expanded");
-  Entry.first = getTableId(Lo);
-  Entry.second = getTableId(Hi);
+  std::pair<SDValue, SDValue> &Entry = ExpandedIntegers[Op];
+  assert(!Entry.first.getNode() && "Node already expanded");
+  Entry.first = Lo;
+  Entry.second = Hi;
 }
 
 void DAGTypeLegalizer::GetExpandedFloat(SDValue Op, SDValue &Lo,
                                         SDValue &Hi) {
-  std::pair<TableId, TableId> &Entry = ExpandedFloats[getTableId(Op)];
-  assert((Entry.first != 0) && "Operand isn't expanded");
-  Lo = getSDValue(Entry.first);
-  Hi = getSDValue(Entry.second);
+  std::pair<SDValue, SDValue> &Entry = ExpandedFloats[Op];
+  RemapValue(Entry.first);
+  RemapValue(Entry.second);
+  assert(Entry.first.getNode() && "Operand isn't expanded");
+  Lo = Entry.first;
+  Hi = Entry.second;
 }
 
 void DAGTypeLegalizer::SetExpandedFloat(SDValue Op, SDValue Lo,
@@ -807,19 +891,21 @@ void DAGTypeLegalizer::SetExpandedFloat(SDValue Op, SDValue Lo,
   AnalyzeNewValue(Lo);
   AnalyzeNewValue(Hi);
 
-  std::pair<TableId, TableId> &Entry = ExpandedFloats[getTableId(Op)];
-  assert((Entry.first == 0) && "Node already expanded");
-  Entry.first = getTableId(Lo);
-  Entry.second = getTableId(Hi);
+  // Remember that this is the result of the node.
+  std::pair<SDValue, SDValue> &Entry = ExpandedFloats[Op];
+  assert(!Entry.first.getNode() && "Node already expanded");
+  Entry.first = Lo;
+  Entry.second = Hi;
 }
 
 void DAGTypeLegalizer::GetSplitVector(SDValue Op, SDValue &Lo,
                                       SDValue &Hi) {
-  std::pair<TableId, TableId> &Entry = SplitVectors[getTableId(Op)];
-  Lo = getSDValue(Entry.first);
-  Hi = getSDValue(Entry.second);
-  assert(Lo.getNode() && "Operand isn't split");
-  ;
+  std::pair<SDValue, SDValue> &Entry = SplitVectors[Op];
+  RemapValue(Entry.first);
+  RemapValue(Entry.second);
+  assert(Entry.first.getNode() && "Operand isn't split");
+  Lo = Entry.first;
+  Hi = Entry.second;
 }
 
 void DAGTypeLegalizer::SetSplitVector(SDValue Op, SDValue Lo,
@@ -835,10 +921,10 @@ void DAGTypeLegalizer::SetSplitVector(SDValue Op, SDValue Lo,
   AnalyzeNewValue(Hi);
 
   // Remember that this is the result of the node.
-  std::pair<TableId, TableId> &Entry = SplitVectors[getTableId(Op)];
-  assert((Entry.first == 0) && "Node already split");
-  Entry.first = getTableId(Lo);
-  Entry.second = getTableId(Hi);
+  std::pair<SDValue, SDValue> &Entry = SplitVectors[Op];
+  assert(!Entry.first.getNode() && "Node already split");
+  Entry.first = Lo;
+  Entry.second = Hi;
 }
 
 void DAGTypeLegalizer::SetWidenedVector(SDValue Op, SDValue Result) {
@@ -847,9 +933,9 @@ void DAGTypeLegalizer::SetWidenedVector(SDValue Op, SDValue Result) {
          "Invalid type for widened vector");
   AnalyzeNewValue(Result);
 
-  auto &OpIdEntry = WidenedVectors[getTableId(Op)];
-  assert((OpIdEntry == 0) && "Node already widened!");
-  OpIdEntry = getTableId(Result);
+  SDValue &OpEntry = WidenedVectors[Op];
+  assert(!OpEntry.getNode() && "Node already widened!");
+  OpEntry = Result;
 }
 
 
--- a/llvm/lib/CodeGen/SelectionDAG/LegalizeTypes.h
+++ b/llvm/lib/CodeGen/SelectionDAG/LegalizeTypes.h
@@ -93,81 +93,46 @@ private:
            N->getOpcode() == ISD::Register;
   }
 
-  // Bijection from SDValue to unique id. As each created node gets a
-  // new id we do not need to worry about reuse expunging. Should we
-  // run out of ids, we can do a one time expensive compactifcation.
-  typedef unsigned TableId;
-
-  TableId NextValueId = 1;
-
-  SmallDenseMap<SDValue, TableId, 8> ValueToIdMap;
-  SmallDenseMap<TableId, SDValue, 8> IdToValueMap;
-
   /// For integer nodes that are below legal width, this map indicates what
   /// promoted value to use.
-  SmallDenseMap<TableId, TableId, 8> PromotedIntegers;
+  SmallDenseMap<SDValue, SDValue, 8> PromotedIntegers;
 
   /// For integer nodes that need to be expanded this map indicates which
   /// operands are the expanded version of the input.
-  SmallDenseMap<TableId, std::pair<TableId, TableId>, 8> ExpandedIntegers;
+  SmallDenseMap<SDValue, std::pair<SDValue, SDValue>, 8> ExpandedIntegers;
 
   /// For floating-point nodes converted to integers of the same size, this map
   /// indicates the converted value to use.
-  SmallDenseMap<TableId, TableId, 8> SoftenedFloats;
+  SmallDenseMap<SDValue, SDValue, 8> SoftenedFloats;
 
   /// For floating-point nodes that have a smaller precision than the smallest
   /// supported precision, this map indicates what promoted value to use.
-  SmallDenseMap<TableId, TableId, 8> PromotedFloats;
+  SmallDenseMap<SDValue, SDValue, 8> PromotedFloats;
 
   /// For float nodes that need to be expanded this map indicates which operands
   /// are the expanded version of the input.
-  SmallDenseMap<TableId, std::pair<TableId, TableId>, 8> ExpandedFloats;
+  SmallDenseMap<SDValue, std::pair<SDValue, SDValue>, 8> ExpandedFloats;
 
   /// For nodes that are <1 x ty>, this map indicates the scalar value of type
   /// 'ty' to use.
-  SmallDenseMap<TableId, TableId, 8> ScalarizedVectors;
+  SmallDenseMap<SDValue, SDValue, 8> ScalarizedVectors;
 
   /// For nodes that need to be split this map indicates which operands are the
   /// expanded version of the input.
-  SmallDenseMap<TableId, std::pair<TableId, TableId>, 8> SplitVectors;
+  SmallDenseMap<SDValue, std::pair<SDValue, SDValue>, 8> SplitVectors;
 
   /// For vector nodes that need to be widened, indicates the widened value to
   /// use.
-  SmallDenseMap<TableId, TableId, 8> WidenedVectors;
+  SmallDenseMap<SDValue, SDValue, 8> WidenedVectors;
 
   /// For values that have been replaced with another, indicates the replacement
   /// value to use.
-  SmallDenseMap<TableId, TableId, 8> ReplacedValues;
+  SmallDenseMap<SDValue, SDValue, 8> ReplacedValues;
 
   /// This defines a worklist of nodes to process. In order to be pushed onto
   /// this worklist, all operands of a node must have already been processed.
   SmallVector<SDNode*, 128> Worklist;
 
-  TableId getTableId(SDValue V) {
-    assert(V.getNode() && "Getting TableId on SDValue()");
-
-    auto I = ValueToIdMap.find(V);
-    if (I != ValueToIdMap.end()) {
-      // replace if there's been a shift.
-      RemapId(I->second);
-      assert(I->second && "All Ids should be nonzero");
-      return I->second;
-    }
-    // Add if it's not there.
-    ValueToIdMap.insert(std::make_pair(V, NextValueId));
-    IdToValueMap.insert(std::make_pair(NextValueId, V));
-    ++NextValueId;
-    assert(NextValueId != 0 &&
-           "Ran out of Ids. Increase id type size or add compactification");
-    return NextValueId - 1;
-  }
-
-  const SDValue &getSDValue(TableId &Id) {
-    RemapId(Id);
-    assert(Id && "TableId should be non-zero");
-    return IdToValueMap[Id];
-  }
-
 public:
   explicit DAGTypeLegalizer(SelectionDAG &dag)
       : TLI(dag.getTargetLoweringInfo()), DAG(dag),
@@ -182,24 +147,10 @@ public:
   bool run();
 
   void NoteDeletion(SDNode *Old, SDNode *New) {
-    for (unsigned i = 0, e = Old->getNumValues(); i != e; ++i) {
-      TableId NewId = getTableId(SDValue(New, i));
-      TableId OldId = getTableId(SDValue(Old, i));
-      ReplacedValues[OldId] = NewId;
-
-      // Delete Node from tables.
-      ValueToIdMap.erase(SDValue(Old, i));
-      IdToValueMap.erase(OldId);
-      PromotedIntegers.erase(OldId);
-      ExpandedIntegers.erase(OldId);
-      SoftenedFloats.erase(OldId);
-      PromotedFloats.erase(OldId);
-      ExpandedFloats.erase(OldId);
-      ScalarizedVectors.erase(OldId);
-      SplitVectors.erase(OldId);
-      WidenedVectors.erase(OldId);
-    }
+    ExpungeNode(Old);
+    ExpungeNode(New);
+    for (unsigned i = 0, e = Old->getNumValues(); i != e; ++i)
+      ReplacedValues[SDValue(Old, i)] = SDValue(New, i);
   }
 
   SelectionDAG &getDAG() const { return DAG; }
|
||||||
private:
|
private:
|
||||||
SDNode *AnalyzeNewNode(SDNode *N);
|
SDNode *AnalyzeNewNode(SDNode *N);
|
||||||
void AnalyzeNewValue(SDValue &Val);
|
void AnalyzeNewValue(SDValue &Val);
|
||||||
|
void ExpungeNode(SDNode *N);
|
||||||
void PerformExpensiveChecks();
|
void PerformExpensiveChecks();
|
||||||
void RemapId(TableId &N);
|
void RemapValue(SDValue &N);
|
||||||
void RemapValue(SDValue &V);
|
|
||||||
|
|
||||||
// Common routines.
|
// Common routines.
|
||||||
SDValue BitConvertToInteger(SDValue Op);
|
SDValue BitConvertToInteger(SDValue Op);
|
||||||
|
@ -256,8 +207,8 @@ private:
|
||||||
/// returns an i32, the lower 16 bits of which coincide with Op, and the upper
|
/// returns an i32, the lower 16 bits of which coincide with Op, and the upper
|
||||||
/// 16 bits of which contain rubbish.
|
/// 16 bits of which contain rubbish.
|
||||||
SDValue GetPromotedInteger(SDValue Op) {
|
SDValue GetPromotedInteger(SDValue Op) {
|
||||||
TableId &PromotedId = PromotedIntegers[getTableId(Op)];
|
SDValue &PromotedOp = PromotedIntegers[Op];
|
||||||
SDValue PromotedOp = getSDValue(PromotedId);
|
RemapValue(PromotedOp);
|
||||||
assert(PromotedOp.getNode() && "Operand wasn't promoted?");
|
assert(PromotedOp.getNode() && "Operand wasn't promoted?");
|
||||||
return PromotedOp;
|
return PromotedOp;
|
||||||
}
|
}
|
||||||
|
@ -451,15 +402,16 @@ private:
|
||||||
/// stay in a register, the Op is not converted to an integer.
|
/// stay in a register, the Op is not converted to an integer.
|
||||||
/// In that case, the given op is returned.
|
/// In that case, the given op is returned.
|
||||||
SDValue GetSoftenedFloat(SDValue Op) {
|
SDValue GetSoftenedFloat(SDValue Op) {
|
||||||
TableId Id = getTableId(Op);
|
auto Iter = SoftenedFloats.find(Op);
|
||||||
auto Iter = SoftenedFloats.find(Id);
|
|
||||||
if (Iter == SoftenedFloats.end()) {
|
if (Iter == SoftenedFloats.end()) {
|
||||||
assert(isSimpleLegalType(Op.getValueType()) &&
|
assert(isSimpleLegalType(Op.getValueType()) &&
|
||||||
"Operand wasn't converted to integer?");
|
"Operand wasn't converted to integer?");
|
||||||
return Op;
|
return Op;
|
||||||
}
|
}
|
||||||
SDValue SoftenedOp = getSDValue(Iter->second);
|
|
||||||
|
SDValue &SoftenedOp = Iter->second;
|
||||||
assert(SoftenedOp.getNode() && "Unconverted op in SoftenedFloats?");
|
assert(SoftenedOp.getNode() && "Unconverted op in SoftenedFloats?");
|
||||||
|
RemapValue(SoftenedOp);
|
||||||
return SoftenedOp;
|
return SoftenedOp;
|
||||||
}
|
}
|
||||||
void SetSoftenedFloat(SDValue Op, SDValue Result);
|
void SetSoftenedFloat(SDValue Op, SDValue Result);
|
||||||
|
@ -596,8 +548,8 @@ private:
|
||||||
//===--------------------------------------------------------------------===//
|
//===--------------------------------------------------------------------===//
|
||||||
|
|
||||||
SDValue GetPromotedFloat(SDValue Op) {
|
SDValue GetPromotedFloat(SDValue Op) {
|
||||||
TableId &PromotedId = PromotedFloats[getTableId(Op)];
|
SDValue &PromotedOp = PromotedFloats[Op];
|
||||||
SDValue PromotedOp = getSDValue(PromotedId);
|
RemapValue(PromotedOp);
|
||||||
assert(PromotedOp.getNode() && "Operand wasn't promoted?");
|
assert(PromotedOp.getNode() && "Operand wasn't promoted?");
|
||||||
return PromotedOp;
|
return PromotedOp;
|
||||||
}
|
}
|
||||||
|
@ -636,8 +588,8 @@ private:
|
||||||
/// element type, this returns the element. For example, if Op is a v1i32,
|
/// element type, this returns the element. For example, if Op is a v1i32,
|
||||||
/// Op = < i32 val >, this method returns val, an i32.
|
/// Op = < i32 val >, this method returns val, an i32.
|
||||||
SDValue GetScalarizedVector(SDValue Op) {
|
SDValue GetScalarizedVector(SDValue Op) {
|
||||||
TableId &ScalarizedId = ScalarizedVectors[getTableId(Op)];
|
SDValue &ScalarizedOp = ScalarizedVectors[Op];
|
||||||
SDValue ScalarizedOp = getSDValue(ScalarizedId);
|
RemapValue(ScalarizedOp);
|
||||||
assert(ScalarizedOp.getNode() && "Operand wasn't scalarized?");
|
assert(ScalarizedOp.getNode() && "Operand wasn't scalarized?");
|
||||||
return ScalarizedOp;
|
return ScalarizedOp;
|
||||||
}
|
}
|
||||||
|
@ -748,8 +700,8 @@ private:
|
||||||
/// method returns a v4i32 for which the first two elements are the same as
|
/// method returns a v4i32 for which the first two elements are the same as
|
||||||
/// those of Op, while the last two elements contain rubbish.
|
/// those of Op, while the last two elements contain rubbish.
|
||||||
SDValue GetWidenedVector(SDValue Op) {
|
SDValue GetWidenedVector(SDValue Op) {
|
||||||
TableId &WidenedId = WidenedVectors[getTableId(Op)];
|
SDValue &WidenedOp = WidenedVectors[Op];
|
||||||
SDValue WidenedOp = getSDValue(WidenedId);
|
RemapValue(WidenedOp);
|
||||||
assert(WidenedOp.getNode() && "Operand wasn't widened?");
|
assert(WidenedOp.getNode() && "Operand wasn't widened?");
|
||||||
return WidenedOp;
|
return WidenedOp;
|
||||||
}
|
}
|
||||||
|
|