Allow invokable sub-classes of IntrinsicInst
It used to be that all of our intrinsics were call instructions, but over time we have added more and more invokable intrinsics; according to the verifier, we are up to 8 right now. As IntrinsicInst is a subclass of CallInst, this puts us in an awkward spot where the idiomatic means of checking for an intrinsic has a false negative when the intrinsic is invoked (a sketch of this follows the commit metadata below). This change switches IntrinsicInst from being a subclass of CallInst to being a subclass of CallBase. That allows invoked intrinsics to be instances of IntrinsicInst, at the cost of requiring a few more casts to CallInst in places where the intrinsic really is known to be a call, not an invoke.

Planned cleanups after this lands and has baked for a couple of days:
- Make GCStatepointInst an IntrinsicInst subclass.
- Merge intrinsic handling in InstCombine and use the idiomatic visitIntrinsicInst entry point for InstVisitor.
- Do the same in SelectionDAG.
- Do the same in FastISel.

Differential Revision: https://reviews.llvm.org/D99976
This commit is contained in: parent 080d48f279, commit d87b9b81cc
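To make the false negative from the commit message concrete, here is a minimal sketch; it is not part of this patch, and the helper name countIntrinsicSites is made up for illustration:

// Minimal sketch, not from this patch: a hypothetical helper that walks a
// function and counts instructions targeting an intrinsic.
#include "llvm/IR/Function.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/IntrinsicInst.h"

using namespace llvm;

static unsigned countIntrinsicSites(Function &F) {
  unsigned Count = 0;
  for (Instruction &I : instructions(F))
    // Before this change IntrinsicInst derived from CallInst, so this isa<>
    // check was a false negative for an invoked intrinsic (for example an
    // invoked gc.statepoint). With IntrinsicInst deriving from CallBase,
    // both call and invoke sites of an intrinsic now match.
    if (isa<IntrinsicInst>(I))
      ++Count;
  return Count;
}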
@@ -154,8 +154,8 @@ public:
   // instruction to a special delegation helper.
 #define HANDLE_INST(NUM, OPCODE, CLASS) \
   RetTy visit##OPCODE(CLASS &I) { \
-    if (NUM == Instruction::Call) \
-      return delegateCallInst(I); \
+    if (NUM == Instruction::Call || NUM == Instruction::Invoke) \
+      return delegateCallBase(I); \
     else \
       DELEGATE(CLASS); \
   }
@@ -215,7 +215,12 @@ public:
   RetTy visitVAStartInst(VAStartInst &I) { DELEGATE(IntrinsicInst); }
   RetTy visitVAEndInst(VAEndInst &I) { DELEGATE(IntrinsicInst); }
   RetTy visitVACopyInst(VACopyInst &I) { DELEGATE(IntrinsicInst); }
-  RetTy visitIntrinsicInst(IntrinsicInst &I) { DELEGATE(CallInst); }
+  RetTy visitIntrinsicInst(IntrinsicInst &I) {
+    if (isa<CallInst>(I))
+      return static_cast<SubClass*>(this)->visitCallInst(cast<CallInst>(I));
+    else
+      return static_cast<SubClass*>(this)->visitInvokeInst(cast<InvokeInst>(I));
+  }
   RetTy visitCallInst(CallInst &I) { DELEGATE(CallBase); }
   RetTy visitInvokeInst(InvokeInst &I) { DELEGATE(CallBase); }
   RetTy visitCallBrInst(CallBrInst &I) { DELEGATE(CallBase); }
@@ -280,7 +285,7 @@ public:

 private:
   // Special helper function to delegate to CallInst subclass visitors.
-  RetTy delegateCallInst(CallInst &I) {
+  RetTy delegateCallBase(CallBase &I) {
     if (const Function *F = I.getCalledFunction()) {
       switch (F->getIntrinsicID()) {
       default: DELEGATE(IntrinsicInst);
@@ -296,13 +301,16 @@ private:
       case Intrinsic::not_intrinsic: break;
       }
     }
-    DELEGATE(CallInst);
+    if (isa<CallInst>(I))
+      return static_cast<SubClass*>(this)->visitCallInst(cast<CallInst>(I));
+    else
+      return static_cast<SubClass*>(this)->visitInvokeInst(cast<InvokeInst>(I));
   }

   // An overload that will never actually be called, it is used only from dead
   // code in the dispatching from opcodes to instruction subclasses.
-  RetTy delegateCallInst(Instruction &I) {
-    llvm_unreachable("delegateCallInst called for non-CallInst");
+  RetTy delegateCallBase(Instruction &I) {
+    llvm_unreachable("delegateCallBase called for non-CallBase");
   }
 };

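A hedged sketch of what the InstVisitor change above buys a client (the visitor below is illustrative, not part of the patch): with Invoke now routed through delegateCallBase, a single visitIntrinsicInst override fires for an intrinsic whether it is called or invoked.

#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"

using namespace llvm;

// Illustrative visitor, not from this patch: counts every intrinsic site.
struct IntrinsicSiteCounter : InstVisitor<IntrinsicSiteCounter> {
  unsigned Seen = 0;
  // With the dispatch above, both a call of @llvm.foo and an invoke of
  // @llvm.foo reach this override.
  void visitIntrinsicInst(IntrinsicInst &) { ++Seen; }
};

It is driven as usual: construct the visitor and call visit on a Function; before this change the override only saw call sites of intrinsics.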
@@ -13,10 +13,10 @@
 // if (MemCpyInst *MCI = dyn_cast<MemCpyInst>(Inst))
 //   ... MCI->getDest() ... MCI->getSource() ...
 //
-// All intrinsic function calls are instances of the call instruction, so these
-// are all subclasses of the CallInst class. Note that none of these classes
-// has state or virtual methods, which is an important part of this gross/neat
-// hack working.
+// All intrinsic function calls are instances of the call or invoke instruction,
+// so these are all subclasses of the CallBase class. Note that none of these
+// classes has state or virtual methods, which is an important part of this
+// gross/neat hack working.
 //
 //===----------------------------------------------------------------------===//

@@ -42,7 +42,7 @@ namespace llvm {
 /// A wrapper class for inspecting calls to intrinsic functions.
 /// This allows the standard isa/dyncast/cast functionality to work with calls
 /// to intrinsic functions.
-class IntrinsicInst : public CallInst {
+class IntrinsicInst : public CallBase {
 public:
   IntrinsicInst() = delete;
   IntrinsicInst(const IntrinsicInst &) = delete;
@@ -107,13 +107,13 @@ public:
   }

   // Methods for support type inquiry through isa, cast, and dyn_cast:
-  static bool classof(const CallInst *I) {
+  static bool classof(const CallBase *I) {
     if (const Function *CF = I->getCalledFunction())
       return CF->isIntrinsic();
     return false;
   }
   static bool classof(const Value *V) {
-    return isa<CallInst>(V) && classof(cast<CallInst>(V));
+    return isa<CallBase>(V) && classof(cast<CallBase>(V));
   }
 };

@@ -83,7 +83,7 @@ void llvm::findDevirtualizableCallsForTypeTest(
     // Find llvm.assume intrinsics for this llvm.type.test call.
     for (const Use &CIU : CI->uses())
       if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
-        Assumes.push_back(Assume);
+        Assumes.push_back(cast<CallInst>(Assume));

     // If we found any, search for virtual calls based on %p and add them to
     // DevirtCalls.
@@ -1338,15 +1338,15 @@ bool FastISel::selectIntrinsicCall(const IntrinsicInst *II) {
     return true;
   }
   case Intrinsic::experimental_stackmap:
-    return selectStackmap(II);
+    return selectStackmap(cast<CallInst>(II));
   case Intrinsic::experimental_patchpoint_void:
   case Intrinsic::experimental_patchpoint_i64:
-    return selectPatchpoint(II);
+    return selectPatchpoint(cast<CallInst>(II));

   case Intrinsic::xray_customevent:
-    return selectXRayCustomEvent(II);
+    return selectXRayCustomEvent(cast<CallInst>(II));
   case Intrinsic::xray_typedevent:
-    return selectXRayTypedEvent(II);
+    return selectXRayTypedEvent(cast<CallInst>(II));
   }

   return fastLowerIntrinsicCall(II);
@@ -244,7 +244,7 @@ void ShadowStackGCLowering::CollectRoots(Function &F) {
       if (Function *F = CI->getCalledFunction())
         if (F->getIntrinsicID() == Intrinsic::gcroot) {
           std::pair<CallInst *, AllocaInst *> Pair = std::make_pair(
-              CI,
+              cast<CallInst>(CI),
               cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
           if (IsNullValue(CI->getArgOperand(1)))
             Roots.push_back(Pair);
@@ -255,7 +255,7 @@ static const CallInst *findStackProtectorIntrinsic(Function &F) {
     for (const Instruction &I : BB)
       if (const auto *II = dyn_cast<IntrinsicInst>(&I))
        if (II->getIntrinsicID() == Intrinsic::stackprotector)
-          return II;
+          return cast<CallInst>(II);
   return nullptr;
 }

@@ -1143,7 +1143,7 @@ void Interpreter::visitIntrinsicInst(IntrinsicInst &I) {
   bool atBegin(Parent->begin() == Me);
   if (!atBegin)
     --Me;
-  IL->LowerIntrinsicCall(&I);
+  IL->LowerIntrinsicCall(cast<CallInst>(&I));

   // Restore the CurInst pointer to the first instruction newly inserted, if
   // any.
@@ -3482,7 +3482,8 @@ bool AArch64FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
       return false;

     const char *IntrMemName = isa<MemCpyInst>(II) ? "memcpy" : "memmove";
-    return lowerCallTo(II, IntrMemName, II->getNumArgOperands() - 1);
+    return lowerCallTo(cast<CallInst>(II), IntrMemName,
+                       II->getNumArgOperands() - 1);
   }
   case Intrinsic::memset: {
     const MemSetInst *MSI = cast<MemSetInst>(II);
@@ -3498,7 +3499,8 @@ bool AArch64FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
       // address spaces.
       return false;

-    return lowerCallTo(II, "memset", II->getNumArgOperands() - 1);
+    return lowerCallTo(cast<CallInst>(II), "memset",
+                       II->getNumArgOperands() - 1);
   }
   case Intrinsic::sin:
   case Intrinsic::cos:
@@ -1660,7 +1660,7 @@ bool MipsFastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
     if (!MTI->getLength()->getType()->isIntegerTy(32))
       return false;
     const char *IntrMemName = isa<MemCpyInst>(II) ? "memcpy" : "memmove";
-    return lowerCallTo(II, IntrMemName, II->getNumArgOperands() - 1);
+    return lowerCallTo(cast<CallInst>(II), IntrMemName, II->getNumArgOperands() - 1);
   }
   case Intrinsic::memset: {
     const MemSetInst *MSI = cast<MemSetInst>(II);
@@ -1669,7 +1669,7 @@ bool MipsFastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
       return false;
     if (!MSI->getLength()->getType()->isIntegerTy(32))
       return false;
-    return lowerCallTo(II, "memset", II->getNumArgOperands() - 1);
+    return lowerCallTo(cast<CallInst>(II), "memset", II->getNumArgOperands() - 1);
   }
   }
   return false;
@@ -2738,7 +2738,7 @@ bool X86FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
     if (MCI->getSourceAddressSpace() > 255 || MCI->getDestAddressSpace() > 255)
       return false;

-    return lowerCallTo(II, "memcpy", II->getNumArgOperands() - 1);
+    return lowerCallTo(cast<CallInst>(II), "memcpy", II->getNumArgOperands() - 1);
   }
   case Intrinsic::memset: {
     const MemSetInst *MSI = cast<MemSetInst>(II);
@@ -2753,7 +2753,7 @@ bool X86FastISel::fastLowerIntrinsicCall(const IntrinsicInst *II) {
     if (MSI->getDestAddressSpace() > 255)
       return false;

-    return lowerCallTo(II, "memset", II->getNumArgOperands() - 1);
+    return lowerCallTo(cast<CallInst>(II), "memset", II->getNumArgOperands() - 1);
   }
   case Intrinsic::stackprotector: {
     // Emit code to store the stack guard onto the stack.
@@ -140,6 +140,7 @@ public:
   Instruction *visitAddrSpaceCast(AddrSpaceCastInst &CI);
   Instruction *foldItoFPtoI(CastInst &FI);
   Instruction *visitSelectInst(SelectInst &SI);
+  Instruction *visitCallBase(CallBase &Call);
   Instruction *visitCallInst(CallInst &CI);
   Instruction *visitInvokeInst(InvokeInst &II);
   Instruction *visitCallBrInst(CallBrInst &CBI);
@@ -222,7 +223,6 @@ private:
                                Instruction &CtxI, Value *&OperationResult,
                                Constant *&OverflowResult);

-  Instruction *visitCallBase(CallBase &Call);
   Instruction *tryOptimizeCall(CallInst *CI);
   bool transformConstExprCastCall(CallBase &Call);
   Instruction *transformCallThroughTrampoline(CallBase &Call,
@@ -4142,7 +4142,7 @@ struct VarArgAMD64Helper : public VarArgHelper {
   Value *VAArgTLSOriginCopy = nullptr;
   Value *VAArgOverflowSize = nullptr;

-  SmallVector<CallInst*, 16> VAStartInstrumentationList;
+  SmallVector<IntrinsicInst*, 16> VAStartInstrumentationList;

   enum ArgKind { AK_GeneralPurpose, AK_FloatingPoint, AK_Memory };

@@ -4351,7 +4351,7 @@ struct VarArgAMD64Helper : public VarArgHelper {
     // Instrument va_start.
     // Copy va_list shadow from the backup copy of the TLS contents.
     for (size_t i = 0, n = VAStartInstrumentationList.size(); i < n; i++) {
-      CallInst *OrigInst = VAStartInstrumentationList[i];
+      auto *OrigInst = VAStartInstrumentationList[i];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);

@@ -4405,7 +4405,7 @@ struct VarArgMIPS64Helper : public VarArgHelper {
   Value *VAArgTLSCopy = nullptr;
   Value *VAArgSize = nullptr;

-  SmallVector<CallInst*, 16> VAStartInstrumentationList;
+  SmallVector<IntrinsicInst*, 16> VAStartInstrumentationList;

   VarArgMIPS64Helper(Function &F, MemorySanitizer &MS,
                      MemorySanitizerVisitor &MSV) : F(F), MS(MS), MSV(MSV) {}
@@ -4494,7 +4494,7 @@ struct VarArgMIPS64Helper : public VarArgHelper {
     // Instrument va_start.
     // Copy va_list shadow from the backup copy of the TLS contents.
     for (size_t i = 0, n = VAStartInstrumentationList.size(); i < n; i++) {
-      CallInst *OrigInst = VAStartInstrumentationList[i];
+      auto *OrigInst = VAStartInstrumentationList[i];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);
       Type *RegSaveAreaPtrTy = Type::getInt64PtrTy(*MS.C);
@@ -4533,7 +4533,7 @@ struct VarArgAArch64Helper : public VarArgHelper {
   Value *VAArgTLSCopy = nullptr;
   Value *VAArgOverflowSize = nullptr;

-  SmallVector<CallInst*, 16> VAStartInstrumentationList;
+  SmallVector<IntrinsicInst*, 16> VAStartInstrumentationList;

   enum ArgKind { AK_GeneralPurpose, AK_FloatingPoint, AK_Memory };

@@ -4688,7 +4688,7 @@ struct VarArgAArch64Helper : public VarArgHelper {
     // Instrument va_start, copy va_list shadow from the backup copy of
     // the TLS contents.
     for (size_t i = 0, n = VAStartInstrumentationList.size(); i < n; i++) {
-      CallInst *OrigInst = VAStartInstrumentationList[i];
+      auto *OrigInst = VAStartInstrumentationList[i];
       IRBuilder<> IRB(OrigInst->getNextNode());

       Value *VAListTag = OrigInst->getArgOperand(0);
@@ -4783,7 +4783,7 @@ struct VarArgPowerPC64Helper : public VarArgHelper {
   Value *VAArgTLSCopy = nullptr;
   Value *VAArgSize = nullptr;

-  SmallVector<CallInst*, 16> VAStartInstrumentationList;
+  SmallVector<IntrinsicInst*, 16> VAStartInstrumentationList;

   VarArgPowerPC64Helper(Function &F, MemorySanitizer &MS,
                         MemorySanitizerVisitor &MSV) : F(F), MS(MS), MSV(MSV) {}
@@ -4932,7 +4932,7 @@ struct VarArgPowerPC64Helper : public VarArgHelper {
     // Instrument va_start.
     // Copy va_list shadow from the backup copy of the TLS contents.
     for (size_t i = 0, n = VAStartInstrumentationList.size(); i < n; i++) {
-      CallInst *OrigInst = VAStartInstrumentationList[i];
+      auto *OrigInst = VAStartInstrumentationList[i];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);
       Type *RegSaveAreaPtrTy = Type::getInt64PtrTy(*MS.C);
@@ -4972,7 +4972,7 @@ struct VarArgSystemZHelper : public VarArgHelper {
   Value *VAArgTLSOriginCopy = nullptr;
   Value *VAArgOverflowSize = nullptr;

-  SmallVector<CallInst *, 16> VAStartInstrumentationList;
+  SmallVector<IntrinsicInst *, 16> VAStartInstrumentationList;

   enum class ArgKind {
     GeneralPurpose,
@@ -5255,7 +5255,7 @@ struct VarArgSystemZHelper : public VarArgHelper {
     // Copy va_list shadow from the backup copy of the TLS contents.
     for (size_t VaStartNo = 0, VaStartNum = VAStartInstrumentationList.size();
          VaStartNo < VaStartNum; VaStartNo++) {
-      CallInst *OrigInst = VAStartInstrumentationList[VaStartNo];
+      auto *OrigInst = VAStartInstrumentationList[VaStartNo];
       IRBuilder<> IRB(OrigInst->getNextNode());
       Value *VAListTag = OrigInst->getArgOperand(0);
       copyRegSaveArea(IRB, VAListTag);