2016-02-12 01:44:59 +08:00
|
|
|
//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
|
|
|
|
//
|
|
|
|
// The LLVM Compiler Infrastructure
|
|
|
|
//
|
|
|
|
// This file is distributed under the University of Illinois Open Source
|
|
|
|
// License. See LICENSE.TXT for details.
|
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
/// \file
|
|
|
|
/// This file implements the MachineIRBuilder class.
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
|
|
|
|
|
|
|
|
#include "llvm/CodeGen/MachineFunction.h"
|
|
|
|
#include "llvm/CodeGen/MachineInstr.h"
|
|
|
|
#include "llvm/CodeGen/MachineInstrBuilder.h"
|
2016-09-09 19:46:34 +08:00
|
|
|
#include "llvm/CodeGen/MachineRegisterInfo.h"
|
2017-11-08 09:01:31 +08:00
|
|
|
#include "llvm/CodeGen/TargetInstrInfo.h"
|
2017-11-17 09:07:10 +08:00
|
|
|
#include "llvm/CodeGen/TargetOpcodes.h"
|
|
|
|
#include "llvm/CodeGen/TargetSubtargetInfo.h"
|
2017-01-27 07:39:14 +08:00
|
|
|
#include "llvm/IR/DebugInfo.h"
|
2016-02-12 01:44:59 +08:00
|
|
|
|
|
|
|
using namespace llvm;
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Bind this builder to a new function and reset all per-insertion state.
// Callers must subsequently select a block/instruction via setMBB/setInstr.
void MachineIRBuilderBase::setMF(MachineFunction &MF) {
  State.MF = &MF;
  State.MRI = &MF.getRegInfo();
  State.TII = MF.getSubtarget().getInstrInfo();
  // Nothing is selected yet: no block, no insertion point, no debug loc.
  State.MBB = nullptr;
  State.II = MachineBasicBlock::iterator();
  State.DL = DebugLoc();
  State.InsertedInstr = nullptr;
}
|
|
|
|
|
|
|
|
// Point the builder at \p MBB; new instructions are appended at the end.
void MachineIRBuilderBase::setMBB(MachineBasicBlock &MBB) {
  assert(&getMF() == MBB.getParent() &&
         "Basic block is in a different function");
  State.MBB = &MBB;
  State.II = MBB.end();
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Set the insertion point to just before \p MI (within MI's parent block).
void MachineIRBuilderBase::setInstr(MachineInstr &MI) {
  MachineBasicBlock *Parent = MI.getParent();
  assert(Parent && "Instruction is not part of a basic block");
  setMBB(*Parent);
  State.II = MI.getIterator();
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Set an explicit insertion point: instructions go before iterator \p II
// inside block \p MBB.
void MachineIRBuilderBase::setInsertPt(MachineBasicBlock &MBB,
                                       MachineBasicBlock::iterator II) {
  assert(MBB.getParent() == &getMF() &&
         "Basic block is in a different function");
  State.II = II;
  State.MBB = &MBB;
}
|
|
|
|
|
2018-05-10 01:28:18 +08:00
|
|
|
// Notify the registered observer (if any) about a newly inserted instruction.
void MachineIRBuilderBase::recordInsertion(MachineInstr *InsertedInstr) const {
  if (!State.InsertedInstr)
    return;
  State.InsertedInstr(InsertedInstr);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
void MachineIRBuilderBase::recordInsertions(
|
2016-08-26 01:37:32 +08:00
|
|
|
std::function<void(MachineInstr *)> Inserted) {
|
2018-04-10 01:30:56 +08:00
|
|
|
State.InsertedInstr = std::move(Inserted);
|
2016-08-26 01:37:32 +08:00
|
|
|
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Clear the insertion-observer callback installed by recordInsertions().
void MachineIRBuilderBase::stopRecordingInsertions() {
  State.InsertedInstr = nullptr;
}
|
|
|
|
|
2016-03-12 01:27:58 +08:00
|
|
|
//------------------------------------------------------------------------------
|
|
|
|
// Build instruction variants.
|
|
|
|
//------------------------------------------------------------------------------
|
2016-07-27 00:45:26 +08:00
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Create an instruction with opcode \p Opcode and insert it at the current
// insertion point.
MachineInstrBuilder MachineIRBuilderBase::buildInstr(unsigned Opcode) {
  MachineInstrBuilder MIB = buildInstrNoInsert(Opcode);
  return insertInstr(MIB);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Create a detached instruction with opcode \p Opcode; the caller is
// responsible for inserting it (see insertInstr).
MachineInstrBuilder MachineIRBuilderBase::buildInstrNoInsert(unsigned Opcode) {
  return BuildMI(getMF(), getDL(), getTII().get(Opcode));
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Insert a previously-built instruction at the current insertion point and
// report it to any registered insertion observer.
MachineInstrBuilder MachineIRBuilderBase::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  recordInsertion(MIB);
  return MIB;
}
|
|
|
|
|
2017-08-02 06:37:35 +08:00
|
|
|
// Build a DBG_VALUE describing \p Variable as living directly in \p Reg.
MachineInstrBuilder
MachineIRBuilderBase::buildDirectDbgValue(unsigned Reg, const MDNode *Variable,
                                          const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  MachineInstrBuilder MIB =
      BuildMI(getMF(), getDL(), getTII().get(TargetOpcode::DBG_VALUE),
              /*IsIndirect*/ false, Reg, Variable, Expr);
  return insertInstr(MIB);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build a DBG_VALUE describing \p Variable as living at the address held in
// \p Reg (indirect location).
MachineInstrBuilder MachineIRBuilderBase::buildIndirectDbgValue(
    unsigned Reg, const MDNode *Variable, const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  MachineInstrBuilder MIB =
      BuildMI(getMF(), getDL(), getTII().get(TargetOpcode::DBG_VALUE),
              /*IsIndirect*/ true, Reg, Variable, Expr);
  return insertInstr(MIB);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build a DBG_VALUE describing \p Variable as living in frame-index slot
// \p FI.
MachineInstrBuilder
MachineIRBuilderBase::buildFIDbgValue(int FI, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  MachineInstrBuilder MIB = buildInstr(TargetOpcode::DBG_VALUE);
  MIB.addFrameIndex(FI);
  MIB.addImm(0);
  MIB.addMetadata(Variable);
  MIB.addMetadata(Expr);
  return MIB;
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build a DBG_VALUE binding \p Variable to a constant \p C. Wide integers go
// in as CImm, narrow ones as a plain immediate, FP constants as FPImm;
// anything else is dropped by emitting %noreg.
MachineInstrBuilder MachineIRBuilderBase::buildConstDbgValue(
    const Constant &C, const MDNode *Variable, const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    // Immediates wider than 64 bits don't fit an MO_Immediate operand.
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert %noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(0U);
  }

  return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_FRAME_INDEX: materialize the address of frame slot \p Idx into the
// pointer-typed register \p Res.
MachineInstrBuilder MachineIRBuilderBase::buildFrameIndex(unsigned Res,
                                                          int Idx) {
  assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_FRAME_INDEX);
  MIB.addDef(Res);
  MIB.addFrameIndex(Idx);
  return MIB;
}
|
2016-07-23 04:03:43 +08:00
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_GLOBAL_VALUE: materialize the address of \p GV into \p Res. The
// result type must be a pointer in the global's address space.
MachineInstrBuilder
MachineIRBuilderBase::buildGlobalValue(unsigned Res, const GlobalValue *GV) {
  const LLT ResTy = getMRI()->getType(Res);
  assert(ResTy.isPointer() && "invalid operand type");
  assert(ResTy.getAddressSpace() == GV->getType()->getAddressSpace() &&
         "address space mismatch");

  return buildInstr(TargetOpcode::G_GLOBAL_VALUE)
      .addDef(Res)
      .addGlobalAddress(GV);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Sanity-check the operands of a generic binary op: result must be scalar or
// vector and all three registers must share one type.
void MachineIRBuilderBase::validateBinaryOp(unsigned Res, unsigned Op0,
                                            unsigned Op1) {
  assert((getMRI()->getType(Res).isScalar() ||
          getMRI()->getType(Res).isVector()) &&
         "invalid operand type");
  assert(getMRI()->getType(Res) == getMRI()->getType(Op0) &&
         getMRI()->getType(Res) == getMRI()->getType(Op1) && "type mismatch");
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_GEP: Res = Op0 + Op1, where Op0/Res are pointers of the same type
// and Op1 is a scalar byte offset.
MachineInstrBuilder MachineIRBuilderBase::buildGEP(unsigned Res, unsigned Op0,
                                                   unsigned Op1) {
  assert(getMRI()->getType(Res).isPointer() &&
         getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");
  assert(getMRI()->getType(Op1).isScalar() && "invalid offset type");

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_GEP);
  MIB.addDef(Res);
  MIB.addUse(Op0);
  MIB.addUse(Op1);
  return MIB;
}
|
|
|
|
|
[globalisel][legalizer] G_LOAD/G_STORE NarrowScalar should not emit G_GEP x, 0.
Summary:
When legalizing G_LOAD/G_STORE using NarrowScalar, we should avoid emitting
%0 = G_CONSTANT ty 0
%1 = G_GEP %x, %0
since it's cheaper to not emit the redundant instructions than it is to fold them
away later.
Reviewers: qcolombet, t.p.northover, ab, rovka, aditya_nandakumar, kristof.beyls
Reviewed By: qcolombet
Subscribers: javed.absar, llvm-commits, igorb
Differential Revision: https://reviews.llvm.org/D32746
llvm-svn: 305340
2017-06-14 07:42:32 +08:00
|
|
|
// Materialize Res = Op0 + Value. When Value is zero no instructions are
// emitted: Res aliases Op0 and None is returned. Otherwise a fresh result
// register is created and a G_CONSTANT + G_GEP pair is built.
Optional<MachineInstrBuilder>
MachineIRBuilderBase::materializeGEP(unsigned &Res, unsigned Op0,
                                     const LLT &ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar() && "invalid offset type");

  // Adding zero is a no-op; avoid emitting a redundant G_CONSTANT/G_GEP.
  if (Value == 0) {
    Res = Op0;
    return None;
  }

  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  unsigned OffsetReg = getMRI()->createGenericVirtualRegister(ValueTy);

  buildConstant(OffsetReg, Value);
  return buildGEP(Res, Op0, OffsetReg);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_PTR_MASK: clear the low \p NumBits bits of pointer \p Op0 into
// \p Res (same pointer type).
MachineInstrBuilder MachineIRBuilderBase::buildPtrMask(unsigned Res,
                                                       unsigned Op0,
                                                       uint32_t NumBits) {
  assert(getMRI()->getType(Res).isPointer() &&
         getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_PTR_MASK);
  MIB.addDef(Res);
  MIB.addUse(Op0);
  MIB.addImm(NumBits);
  return MIB;
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build an unconditional G_BR to \p Dest.
MachineInstrBuilder MachineIRBuilderBase::buildBr(MachineBasicBlock &Dest) {
  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_BR);
  return MIB.addMBB(&Dest);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_BRINDIRECT: branch to the address held in pointer register \p Tgt.
MachineInstrBuilder MachineIRBuilderBase::buildBrIndirect(unsigned Tgt) {
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_BRINDIRECT);
  return MIB.addUse(Tgt);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build a COPY from \p Op to \p Res. Either side may still be untyped (LLT()),
// otherwise the two types must match.
MachineInstrBuilder MachineIRBuilderBase::buildCopy(unsigned Res, unsigned Op) {
  assert(getMRI()->getType(Res) == LLT() || getMRI()->getType(Op) == LLT() ||
         getMRI()->getType(Res) == getMRI()->getType(Op));
  MachineInstrBuilder MIB = buildInstr(TargetOpcode::COPY);
  return MIB.addDef(Res).addUse(Op);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_CONSTANT with the value of \p Val. If the constant's bit width does
// not match the destination type, it is sign-extended or truncated to fit.
MachineInstrBuilder
MachineIRBuilderBase::buildConstant(unsigned Res, const ConstantInt &Val) {
  LLT Ty = getMRI()->getType(Res);

  assert((Ty.isScalar() || Ty.isPointer()) && "invalid operand type");

  const ConstantInt *Imm = &Val;
  if (Ty.getSizeInBits() != Val.getBitWidth())
    Imm = ConstantInt::get(getMF().getFunction().getContext(),
                           Val.getValue().sextOrTrunc(Ty.getSizeInBits()));

  return buildInstr(TargetOpcode::G_CONSTANT).addDef(Res).addCImm(Imm);
}
|
2016-09-09 19:46:58 +08:00
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_CONSTANT from a raw signed 64-bit value, wrapping it in a
// ConstantInt of the destination register's width.
MachineInstrBuilder MachineIRBuilderBase::buildConstant(unsigned Res,
                                                        int64_t Val) {
  auto IntN = IntegerType::get(getMF().getFunction().getContext(),
                               getMRI()->getType(Res).getSizeInBits());
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  return buildConstant(Res, *CI);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_FCONSTANT with the floating-point value \p Val.
MachineInstrBuilder
MachineIRBuilderBase::buildFConstant(unsigned Res, const ConstantFP &Val) {
  assert(getMRI()->getType(Res).isScalar() && "invalid operand type");
  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_FCONSTANT);
  return MIB.addDef(Res).addFPImm(&Val);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_FCONSTANT from a host double, converted to an APFloat of the
// destination type's width first.
MachineInstrBuilder MachineIRBuilderBase::buildFConstant(unsigned Res,
                                                         double Val) {
  LLT DstTy = getMRI()->getType(Res);
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP =
      ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getSizeInBits()));
  return buildFConstant(Res, *CFP);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_BRCOND: branch to \p Dest when the scalar condition \p Tst is true.
MachineInstrBuilder MachineIRBuilderBase::buildBrCond(unsigned Tst,
                                                      MachineBasicBlock &Dest) {
  assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");
  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_BRCOND);
  return MIB.addUse(Tst).addMBB(&Dest);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build a plain G_LOAD of \p Addr into \p Res with memory operand \p MMO.
// Delegates to buildLoadInstr, which also serves G_SEXTLOAD/G_ZEXTLOAD.
MachineInstrBuilder MachineIRBuilderBase::buildLoad(unsigned Res, unsigned Addr,
                                                    MachineMemOperand &MMO) {
  return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO);
}
|
|
|
|
|
|
|
|
// Shared worker for G_LOAD / G_SEXTLOAD / G_ZEXTLOAD: load from pointer
// \p Addr into \p Res, attaching \p MMO.
MachineInstrBuilder
MachineIRBuilderBase::buildLoadInstr(unsigned Opcode, unsigned Res,
                                     unsigned Addr, MachineMemOperand &MMO) {
  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");

  MachineInstrBuilder MIB = buildInstr(Opcode);
  MIB.addDef(Res);
  MIB.addUse(Addr);
  MIB.addMemOperand(&MMO);
  return MIB;
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_STORE: store \p Val through pointer \p Addr, attaching \p MMO.
MachineInstrBuilder MachineIRBuilderBase::buildStore(unsigned Val,
                                                     unsigned Addr,
                                                     MachineMemOperand &MMO) {
  assert(getMRI()->getType(Val).isValid() && "invalid operand type");
  assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_STORE);
  MIB.addUse(Val);
  MIB.addUse(Addr);
  MIB.addMemOperand(&MMO);
  return MIB;
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_UADDE: Res = Op0 + Op1 + CarryIn, with the carry-out written to
// CarryOut. Value operands share one scalar type; carries share another.
MachineInstrBuilder MachineIRBuilderBase::buildUAdde(unsigned Res,
                                                     unsigned CarryOut,
                                                     unsigned Op0, unsigned Op1,
                                                     unsigned CarryIn) {
  assert(getMRI()->getType(Res).isScalar() && "invalid operand type");
  assert(getMRI()->getType(Res) == getMRI()->getType(Op0) &&
         getMRI()->getType(Res) == getMRI()->getType(Op1) && "type mismatch");
  assert(getMRI()->getType(CarryOut).isScalar() && "invalid operand type");
  assert(getMRI()->getType(CarryOut) == getMRI()->getType(CarryIn) &&
         "type mismatch");

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_UADDE);
  MIB.addDef(Res);
  MIB.addDef(CarryOut);
  MIB.addUse(Op0);
  MIB.addUse(Op1);
  MIB.addUse(CarryIn);
  return MIB;
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_ANYEXT from \p Op to the wider type of \p Res.
MachineInstrBuilder MachineIRBuilderBase::buildAnyExt(unsigned Res,
                                                      unsigned Op) {
  validateTruncExt(Res, Op, true);
  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_ANYEXT);
  return MIB.addDef(Res).addUse(Op);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_SEXT from \p Op to the wider type of \p Res.
MachineInstrBuilder MachineIRBuilderBase::buildSExt(unsigned Res, unsigned Op) {
  validateTruncExt(Res, Op, true);
  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_SEXT);
  return MIB.addDef(Res).addUse(Op);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_ZEXT from \p Op to the wider type of \p Res.
MachineInstrBuilder MachineIRBuilderBase::buildZExt(unsigned Res, unsigned Op) {
  validateTruncExt(Res, Op, true);
  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_ZEXT);
  return MIB.addDef(Res).addUse(Op);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Resize \p Op into \p Res: widen with \p ExtOpc, narrow with G_TRUNC, or
// COPY when the sizes already match.
MachineInstrBuilder MachineIRBuilderBase::buildExtOrTrunc(unsigned ExtOpc,
                                                          unsigned Res,
                                                          unsigned Op) {
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
          TargetOpcode::G_SEXT == ExtOpc) &&
         "Expecting Extending Opc");
  assert(getMRI()->getType(Res).isScalar() ||
         getMRI()->getType(Res).isVector());
  assert(getMRI()->getType(Res).isScalar() == getMRI()->getType(Op).isScalar());

  const unsigned ResSize = getMRI()->getType(Res).getSizeInBits();
  const unsigned OpSize = getMRI()->getType(Op).getSizeInBits();

  unsigned Opcode;
  if (ResSize > OpSize) {
    Opcode = ExtOpc;
  } else if (ResSize < OpSize) {
    Opcode = TargetOpcode::G_TRUNC;
  } else {
    // Same size: a plain copy, but then the types must be identical.
    assert(getMRI()->getType(Res) == getMRI()->getType(Op));
    Opcode = TargetOpcode::COPY;
  }

  return buildInstr(Opcode).addDef(Res).addUse(Op);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Sign-extend or truncate \p Op to the type of \p Res as needed.
MachineInstrBuilder MachineIRBuilderBase::buildSExtOrTrunc(unsigned Res,
                                                           unsigned Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}
|
2017-06-28 06:45:35 +08:00
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Zero-extend or truncate \p Op to the type of \p Res as needed.
MachineInstrBuilder MachineIRBuilderBase::buildZExtOrTrunc(unsigned Res,
                                                           unsigned Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}
|
2017-02-04 02:22:45 +08:00
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Any-extend or truncate \p Op to the type of \p Res as needed.
MachineInstrBuilder MachineIRBuilderBase::buildAnyExtOrTrunc(unsigned Res,
                                                             unsigned Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build a same-size cast between \p Src and \p Dst: COPY for identical types,
// G_PTRTOINT / G_INTTOPTR across the pointer/scalar boundary, G_BITCAST
// otherwise.
MachineInstrBuilder MachineIRBuilderBase::buildCast(unsigned Dst,
                                                    unsigned Src) {
  LLT SrcTy = getMRI()->getType(Src);
  LLT DstTy = getMRI()->getType(Dst);
  if (SrcTy == DstTy)
    return buildCopy(Dst, Src);

  unsigned Opcode;
  if (SrcTy.isPointer() && DstTy.isScalar()) {
    Opcode = TargetOpcode::G_PTRTOINT;
  } else if (DstTy.isPointer() && SrcTy.isScalar()) {
    Opcode = TargetOpcode::G_INTTOPTR;
  } else {
    // Pointer-to-pointer casts across types are not supported here.
    assert(!SrcTy.isPointer() && !DstTy.isPointer() && "n G_ADDRCAST yet");
    Opcode = TargetOpcode::G_BITCAST;
  }

  return buildInstr(Opcode).addDef(Dst).addUse(Src);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_EXTRACT: pull getType(Res)-sized bits starting at bit \p Index out
// of \p Src. A full-width extract degenerates to a cast.
MachineInstrBuilder
MachineIRBuilderBase::buildExtract(unsigned Res, unsigned Src, uint64_t Index) {
#ifndef NDEBUG
  assert(getMRI()->getType(Src).isValid() && "invalid operand type");
  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  assert(Index + getMRI()->getType(Res).getSizeInBits() <=
             getMRI()->getType(Src).getSizeInBits() &&
         "extracting off end of register");
#endif

  if (getMRI()->getType(Res).getSizeInBits() ==
      getMRI()->getType(Src).getSizeInBits()) {
    assert(Index == 0 && "insertion past the end of a register");
    return buildCast(Res, Src);
  }

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_EXTRACT);
  MIB.addDef(Res);
  MIB.addUse(Src);
  MIB.addImm(Index);
  return MIB;
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
void MachineIRBuilderBase::buildSequence(unsigned Res, ArrayRef<unsigned> Ops,
|
|
|
|
ArrayRef<uint64_t> Indices) {
|
2016-09-09 19:46:58 +08:00
|
|
|
#ifndef NDEBUG
|
2016-09-09 19:46:34 +08:00
|
|
|
assert(Ops.size() == Indices.size() && "incompatible args");
|
2016-08-20 02:32:14 +08:00
|
|
|
assert(!Ops.empty() && "invalid trivial sequence");
|
2016-08-31 04:51:25 +08:00
|
|
|
assert(std::is_sorted(Indices.begin(), Indices.end()) &&
|
|
|
|
"sequence offsets must be in ascending order");
|
2016-08-20 01:17:06 +08:00
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
assert(getMRI()->getType(Res).isValid() && "invalid operand type");
|
2016-09-09 19:46:58 +08:00
|
|
|
for (auto Op : Ops)
|
2018-04-10 01:30:56 +08:00
|
|
|
assert(getMRI()->getType(Op).isValid() && "invalid operand type");
|
2016-09-09 19:46:58 +08:00
|
|
|
#endif
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
LLT ResTy = getMRI()->getType(Res);
|
|
|
|
LLT OpTy = getMRI()->getType(Ops[0]);
|
2017-06-24 00:15:37 +08:00
|
|
|
unsigned OpSize = OpTy.getSizeInBits();
|
|
|
|
bool MaybeMerge = true;
|
2016-08-20 01:17:06 +08:00
|
|
|
for (unsigned i = 0; i < Ops.size(); ++i) {
|
2018-04-10 01:30:56 +08:00
|
|
|
if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
|
2017-06-24 00:15:37 +08:00
|
|
|
MaybeMerge = false;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
|
|
|
|
buildMerge(Res, Ops);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
unsigned ResIn = getMRI()->createGenericVirtualRegister(ResTy);
|
2017-06-24 00:15:37 +08:00
|
|
|
buildUndef(ResIn);
|
|
|
|
|
|
|
|
for (unsigned i = 0; i < Ops.size(); ++i) {
|
2018-04-10 01:30:56 +08:00
|
|
|
unsigned ResOut = i + 1 == Ops.size()
|
|
|
|
? Res
|
|
|
|
: getMRI()->createGenericVirtualRegister(ResTy);
|
2017-06-24 00:15:37 +08:00
|
|
|
buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
|
|
|
|
ResIn = ResOut;
|
2016-08-20 01:17:06 +08:00
|
|
|
}
|
2017-03-04 06:46:09 +08:00
|
|
|
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_IMPLICIT_DEF, defining \p Res with an undefined value.
MachineInstrBuilder MachineIRBuilderBase::buildUndef(unsigned Res) {
  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_IMPLICIT_DEF);
  return MIB.addDef(Res);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_MERGE_VALUES, concatenating the same-typed registers \p Ops into
// \p Res. A single operand degenerates to a cast.
MachineInstrBuilder MachineIRBuilderBase::buildMerge(unsigned Res,
                                                     ArrayRef<unsigned> Ops) {

#ifndef NDEBUG
  assert(!Ops.empty() && "invalid trivial sequence");
  LLT Ty = getMRI()->getType(Ops[0]);
  for (auto Reg : Ops)
    assert(getMRI()->getType(Reg) == Ty && "type mismatch in input list");
  assert(Ops.size() * getMRI()->getType(Ops[0]).getSizeInBits() ==
             getMRI()->getType(Res).getSizeInBits() &&
         "input operands do not cover output register");
#endif

  if (Ops.size() == 1)
    return buildCast(Res, Ops[0]);

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_MERGE_VALUES);
  MIB.addDef(Res);
  for (unsigned Op : Ops)
    MIB.addUse(Op);
  return MIB;
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_UNMERGE_VALUES, splitting \p Op into the same-typed registers
// \p Res, which together must exactly cover it.
MachineInstrBuilder MachineIRBuilderBase::buildUnmerge(ArrayRef<unsigned> Res,
                                                       unsigned Op) {

#ifndef NDEBUG
  assert(!Res.empty() && "invalid trivial sequence");
  LLT Ty = getMRI()->getType(Res[0]);
  for (auto Reg : Res)
    assert(getMRI()->getType(Reg) == Ty && "type mismatch in input list");
  assert(Res.size() * getMRI()->getType(Res[0]).getSizeInBits() ==
             getMRI()->getType(Op).getSizeInBits() &&
         "input operands do not cover output register");
#endif

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_UNMERGE_VALUES);
  for (unsigned ResReg : Res)
    MIB.addDef(ResReg);
  MIB.addUse(Op);
  return MIB;
}
|
2016-07-30 06:32:36 +08:00
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_INSERT: place \p Op into \p Src at bit offset \p Index, writing the
// combined value to \p Res. A full-width insert degenerates to a cast.
MachineInstrBuilder MachineIRBuilderBase::buildInsert(unsigned Res,
                                                      unsigned Src, unsigned Op,
                                                      unsigned Index) {
  assert(Index + getMRI()->getType(Op).getSizeInBits() <=
             getMRI()->getType(Res).getSizeInBits() &&
         "insertion past the end of a register");

  if (getMRI()->getType(Res).getSizeInBits() ==
      getMRI()->getType(Op).getSizeInBits())
    return buildCast(Res, Op);

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_INSERT);
  MIB.addDef(Res);
  MIB.addUse(Src);
  MIB.addUse(Op);
  MIB.addImm(Index);
  return MIB;
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build a G_INTRINSIC (or G_INTRINSIC_W_SIDE_EFFECTS) call to \p ID, with an
// optional result register (pass 0 for none).
MachineInstrBuilder MachineIRBuilderBase::buildIntrinsic(Intrinsic::ID ID,
                                                         unsigned Res,
                                                         bool HasSideEffects) {
  unsigned Opc = HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC;
  auto MIB = buildInstr(Opc);
  if (Res)
    MIB.addDef(Res);
  MIB.addIntrinsicID(ID);
  return MIB;
}
|
2016-08-05 02:35:11 +08:00
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
// Build G_TRUNC from \p Op to the narrower type of \p Res.
MachineInstrBuilder MachineIRBuilderBase::buildTrunc(unsigned Res,
                                                     unsigned Op) {
  validateTruncExt(Res, Op, false);
  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_TRUNC);
  return MIB.addDef(Res).addUse(Op);
}
|
2016-08-18 04:25:25 +08:00
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
/// Build G_FPTRUNC of \p Op into \p Res. Shares the trunc-direction size
/// validation with the integer variant.
MachineInstrBuilder MachineIRBuilderBase::buildFPTrunc(unsigned Res,
                                                       unsigned Op) {
  validateTruncExt(Res, Op, /*IsExtend=*/false);
  return buildInstr(TargetOpcode::G_FPTRUNC).addDef(Res).addUse(Op);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
/// Build G_ICMP comparing \p Op0 with \p Op1 under integer predicate \p Pred,
/// writing the boolean (or boolean-vector) result to \p Res.
MachineInstrBuilder MachineIRBuilderBase::buildICmp(CmpInst::Predicate Pred,
                                                    unsigned Res, unsigned Op0,
                                                    unsigned Op1) {
#ifndef NDEBUG
  // BUGFIX: the assert used to compare Op0's type with itself, which is
  // trivially true; it must check that both operands have the same type
  // (mirrors the corresponding assert in buildFCmp).
  assert(getMRI()->getType(Op0) == getMRI()->getType(Op1) && "type mismatch");
  assert(CmpInst::isIntPredicate(Pred) && "invalid predicate");
  if (getMRI()->getType(Op0).isScalar() || getMRI()->getType(Op0).isPointer())
    assert(getMRI()->getType(Res).isScalar() && "type mismatch");
  else
    // Vector compare: the result must be a vector with one lane per operand
    // element.
    assert(getMRI()->getType(Res).isVector() &&
           getMRI()->getType(Res).getNumElements() ==
               getMRI()->getType(Op0).getNumElements() &&
           "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ICMP)
      .addDef(Res)
      .addPredicate(Pred)
      .addUse(Op0)
      .addUse(Op1);
}
|
2016-08-20 04:09:07 +08:00
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
/// Build G_FCMP comparing \p Op0 with \p Op1 under floating-point predicate
/// \p Pred, writing the boolean (or boolean-vector) result to \p Res.
MachineInstrBuilder MachineIRBuilderBase::buildFCmp(CmpInst::Predicate Pred,
                                                    unsigned Res, unsigned Op0,
                                                    unsigned Op1) {
#ifndef NDEBUG
  LLT Op0Ty = getMRI()->getType(Op0);
  LLT ResTy = getMRI()->getType(Res);
  assert((Op0Ty.isScalar() || Op0Ty.isVector()) && "invalid operand type");
  assert(Op0Ty == getMRI()->getType(Op1) && "type mismatch");
  assert(CmpInst::isFPPredicate(Pred) && "invalid predicate");
  if (Op0Ty.isScalar()) {
    assert(ResTy.isScalar() && "type mismatch");
  } else {
    // Vector compare: one result lane per operand element.
    assert(ResTy.isVector() &&
           ResTy.getNumElements() == Op0Ty.getNumElements() &&
           "type mismatch");
  }
#endif

  return buildInstr(TargetOpcode::G_FCMP)
      .addDef(Res)
      .addPredicate(Pred)
      .addUse(Op0)
      .addUse(Op1);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
/// Build G_SELECT: Res = Tst ? Op0 : Op1. For vector selects the test may be
/// a single scalar condition or a per-lane vector matching Op0's lane count.
MachineInstrBuilder MachineIRBuilderBase::buildSelect(unsigned Res,
                                                      unsigned Tst,
                                                      unsigned Op0,
                                                      unsigned Op1) {
#ifndef NDEBUG
  LLT ResTy = getMRI()->getType(Res);
  LLT TstTy = getMRI()->getType(Tst);
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
         "invalid operand type");
  assert(ResTy == getMRI()->getType(Op0) && ResTy == getMRI()->getType(Op1) &&
         "type mismatch");
  if (ResTy.isScalar() || ResTy.isPointer()) {
    assert(TstTy.isScalar() && "type mismatch");
  } else {
    // Vector select: the condition is either one scalar applied to all lanes
    // or a vector with exactly one condition bit per result lane.
    assert((TstTy.isScalar() ||
            (TstTy.isVector() &&
             TstTy.getNumElements() ==
                 getMRI()->getType(Op0).getNumElements())) &&
           "type mismatch");
  }
#endif

  return buildInstr(TargetOpcode::G_SELECT)
      .addDef(Res)
      .addUse(Tst)
      .addUse(Op0)
      .addUse(Op1);
}
|
2016-08-24 05:01:33 +08:00
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
/// Build G_INSERT_VECTOR_ELT: Res = Val with lane Idx replaced by Elt.
MachineInstrBuilder
MachineIRBuilderBase::buildInsertVectorElement(unsigned Res, unsigned Val,
                                               unsigned Elt, unsigned Idx) {
#ifndef NDEBUG
  LLT ResTy = getMRI()->getType(Res);
  LLT ValTy = getMRI()->getType(Val);
  LLT EltTy = getMRI()->getType(Elt);
  LLT IdxTy = getMRI()->getType(Idx);
  assert(ResTy.isVector() && ValTy.isVector() && "invalid operand type");
  assert(IdxTy.isScalar() && "invalid operand type");
  // Inserting one lane never changes the vector shape, and the inserted
  // element must match the vector's element type.
  assert(ResTy.getNumElements() == ValTy.getNumElements() && "type mismatch");
  assert(ResTy.getElementType() == EltTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT)
      .addDef(Res)
      .addUse(Val)
      .addUse(Elt)
      .addUse(Idx);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
/// Build G_EXTRACT_VECTOR_ELT: Res = lane Idx of vector Val.
MachineInstrBuilder
MachineIRBuilderBase::buildExtractVectorElement(unsigned Res, unsigned Val,
                                                unsigned Idx) {
#ifndef NDEBUG
  LLT ResTy = getMRI()->getType(Res);
  LLT ValTy = getMRI()->getType(Val);
  LLT IdxTy = getMRI()->getType(Idx);
  assert(ValTy.isVector() && "invalid operand type");
  // The extracted value is a single lane, so it is scalar (or a pointer for
  // pointer vectors) and must equal the vector's element type.
  assert((ResTy.isScalar() || ResTy.isPointer()) && "invalid operand type");
  assert(IdxTy.isScalar() && "invalid operand type");
  assert(ValTy.getElementType() == ResTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT)
      .addDef(Res)
      .addUse(Val)
      .addUse(Idx);
}
|
|
|
|
|
[globalisel][irtranslator] Add support for atomicrmw and (strong) cmpxchg
Summary:
This patch adds support for the atomicrmw instructions and the strong
cmpxchg instruction to the IRTranslator.
I've left out weak cmpxchg because LangRef.rst isn't entirely clear on what
difference it makes to the backend. As far as I can tell from the code, it
only matters to AtomicExpandPass which is run at the LLVM-IR level.
Reviewers: ab, t.p.northover, qcolombet, rovka, aditya_nandakumar, volkan, javed.absar
Reviewed By: qcolombet
Subscribers: kristof.beyls, javed.absar, igorb, llvm-commits
Differential Revision: https://reviews.llvm.org/D40092
llvm-svn: 336589
2018-07-10 03:33:40 +08:00
|
|
|
/// Build G_ATOMIC_CMPXCHG_WITH_SUCCESS: atomically compare the value at
/// \p Addr with \p CmpVal and, on match, store \p NewVal. \p OldValRes
/// receives the previous memory value and \p SuccessRes the comparison
/// outcome; \p MMO carries the memory ordering and size.
MachineInstrBuilder MachineIRBuilderBase::buildAtomicCmpXchgWithSuccess(
    unsigned OldValRes, unsigned SuccessRes, unsigned Addr, unsigned CmpVal,
    unsigned NewVal, MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT SuccessResTy = getMRI()->getType(SuccessRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(SuccessResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  // The loaded value, the expected value and the replacement all share one
  // type.
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
      .addDef(OldValRes)
      .addDef(SuccessRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}
|
|
|
|
|
2017-12-01 04:11:42 +08:00
|
|
|
/// Build G_ATOMIC_CMPXCHG (no separate success flag): atomically compare the
/// value at \p Addr with \p CmpVal and, on match, store \p NewVal; the
/// previous memory value lands in \p OldValRes. \p MMO carries ordering/size.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicCmpXchg(unsigned OldValRes, unsigned Addr,
                                         unsigned CmpVal, unsigned NewVal,
                                         MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  // Result, expected and replacement values all share one type.
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}
|
|
|
|
|
[globalisel][irtranslator] Add support for atomicrmw and (strong) cmpxchg
Summary:
This patch adds support for the atomicrmw instructions and the strong
cmpxchg instruction to the IRTranslator.
I've left out weak cmpxchg because LangRef.rst isn't entirely clear on what
difference it makes to the backend. As far as I can tell from the code, it
only matters to AtomicExpandPass which is run at the LLVM-IR level.
Reviewers: ab, t.p.northover, qcolombet, rovka, aditya_nandakumar, volkan, javed.absar
Reviewed By: qcolombet
Subscribers: kristof.beyls, javed.absar, igorb, llvm-commits
Differential Revision: https://reviews.llvm.org/D40092
llvm-svn: 336589
2018-07-10 03:33:40 +08:00
|
|
|
/// Build a generic atomic read-modify-write of the given \p Opcode
/// (G_ATOMICRMW_*): atomically combine \p Val with the value at \p Addr and
/// return the previous memory value in \p OldValRes. \p MMO carries the
/// ordering and access size. The typed buildAtomicRMW* wrappers delegate here.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMW(unsigned Opcode, unsigned OldValRes,
                                     unsigned Addr, unsigned Val,
                                     MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT ValTy = getMRI()->getType(Val);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(ValTy.isValid() && "invalid operand type");
  // The value read back has the same type as the value written.
  assert(OldValResTy == ValTy && "type mismatch");
#endif

  return buildInstr(Opcode)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(Val)
      .addMemOperand(&MMO);
}
|
|
|
|
|
|
|
|
// Convenience wrappers: each builds one specific G_ATOMICRMW_* operation by
// delegating to buildAtomicRMW with the matching opcode. All share the same
// contract: atomically apply the operation to the value at Addr using Val,
// returning the previous memory value in OldValRes, with ordering/size
// described by MMO.

MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWXchg(unsigned OldValRes, unsigned Addr,
                                         unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWAdd(unsigned OldValRes, unsigned Addr,
                                        unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWSub(unsigned OldValRes, unsigned Addr,
                                        unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWAnd(unsigned OldValRes, unsigned Addr,
                                        unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWNand(unsigned OldValRes, unsigned Addr,
                                         unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWOr(unsigned OldValRes, unsigned Addr,
                                       unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWXor(unsigned OldValRes, unsigned Addr,
                                        unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWMax(unsigned OldValRes, unsigned Addr,
                                        unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWMin(unsigned OldValRes, unsigned Addr,
                                        unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWUmax(unsigned OldValRes, unsigned Addr,
                                         unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWUmin(unsigned OldValRes, unsigned Addr,
                                         unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
                        MMO);
}
|
|
|
|
|
2018-07-31 08:08:50 +08:00
|
|
|
/// Build G_BLOCK_ADDR materializing the address of block-address constant
/// \p BA into pointer-typed register \p Res.
MachineInstrBuilder
MachineIRBuilderBase::buildBlockAddress(unsigned Res, const BlockAddress *BA) {
#ifndef NDEBUG
  assert(getMRI()->getType(Res).isPointer() && "invalid res type");
#endif

  return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
}
|
|
|
|
|
2018-04-10 01:30:56 +08:00
|
|
|
void MachineIRBuilderBase::validateTruncExt(unsigned Dst, unsigned Src,
|
|
|
|
bool IsExtend) {
|
2016-08-24 06:14:15 +08:00
|
|
|
#ifndef NDEBUG
|
2018-04-10 01:30:56 +08:00
|
|
|
LLT SrcTy = getMRI()->getType(Src);
|
|
|
|
LLT DstTy = getMRI()->getType(Dst);
|
2016-08-24 05:01:33 +08:00
|
|
|
|
|
|
|
if (DstTy.isVector()) {
|
[globalisel][irtranslator] Add support for atomicrmw and (strong) cmpxchg
Summary:
This patch adds support for the atomicrmw instructions and the strong
cmpxchg instruction to the IRTranslator.
I've left out weak cmpxchg because LangRef.rst isn't entirely clear on what
difference it makes to the backend. As far as I can tell from the code, it
only matters to AtomicExpandPass which is run at the LLVM-IR level.
Reviewers: ab, t.p.northover, qcolombet, rovka, aditya_nandakumar, volkan, javed.absar
Reviewed By: qcolombet
Subscribers: kristof.beyls, javed.absar, igorb, llvm-commits
Differential Revision: https://reviews.llvm.org/D40092
llvm-svn: 336589
2018-07-10 03:33:40 +08:00
|
|
|
assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
|
2016-08-24 05:01:33 +08:00
|
|
|
assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
|
|
|
|
"different number of elements in a trunc/ext");
|
|
|
|
} else
|
|
|
|
assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");
|
|
|
|
|
|
|
|
if (IsExtend)
|
|
|
|
assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
|
|
|
|
"invalid narrowing extend");
|
|
|
|
else
|
|
|
|
assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
|
|
|
|
"invalid widening trunc");
|
2016-08-24 06:14:15 +08:00
|
|
|
#endif
|
2016-08-24 05:01:33 +08:00
|
|
|
}
|