[ObjCARC] Strength reduce objc_retainBlock -> objc_retain if the objc_retainBlock is optimizable.
If an objc_retainBlock has the copy_on_escape metadata attached to it AND the block pointer argument only escapes down the stack, we are allowed to strength reduce the objc_retainBlock to an objc_retain and thus optimize it.

Currently there is logic in the ARC dataflow analysis to handle this case, which is complicated and involves distinguishing between objc_retainBlock and objc_retain in certain places while treating them as the same in others. This patch simplifies said code by:

1. Performing the strength reduction in the initial ARC peephole analysis (ObjCARCOpts::OptimizeIndividualCalls).
2. Changing the ARC dataflow analysis (which runs after the peephole analysis) to consider all objc_retainBlock calls to be not optimizable, since any optimizable call would already have been strength reduced.

This patch leaves the infrastructure in the ARC dataflow analysis for handling this case in place, which due to (2) will now just be dead code. I am doing this on purpose to separate the removal of the old code from the testing of the new code.

<rdar://problem/13249661>

llvm-svn: 178284
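For illustration, here is a minimal LLVM IR sketch of the strength reduction (the function @sketch and the callee @use_pointer are hypothetical names for this example, not taken from the commit's tests):

declare i8* @objc_retainBlock(i8*)
declare i8* @objc_retain(i8*)
declare void @objc_release(i8*)
declare void @use_pointer(i8*)

; Before: the retainBlock carries clang.arc.copy_on_escape and its block
; pointer %b only escapes down the stack (a plain call), so the copy to
; the heap is unnecessary.
define void @sketch(i8* %b) {
entry:
  %0 = call i8* @objc_retainBlock(i8* %b), !clang.arc.copy_on_escape !0
  call void @use_pointer(i8* %0)
  call void @objc_release(i8* %b)
  ret void
}

; After OptimizeIndividualCalls the retainBlock becomes
;   %0 = call i8* @objc_retain(i8* %b)
; with the copy_on_escape metadata dropped, and the later dataflow
; analysis then treats it like any other objc_retain.

!0 = metadata !{}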
@@ -986,6 +986,8 @@ namespace {
     bool OptimizeRetainRVCall(Function &F, Instruction *RetainRV);
     void OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV,
                                    InstructionClass &Class);
+    bool OptimizeRetainBlockCall(Function &F, Instruction *RetainBlock,
+                                 InstructionClass &Class);
     void OptimizeIndividualCalls(Function &F);
 
     void CheckForCFGHazards(const BasicBlock *BB,
@@ -1319,6 +1321,35 @@ ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV,
 
 }
 
+/// \brief Attempt to strength reduce objc_retainBlock calls to objc_retain
+/// calls.
+///
+/// Specifically: If an objc_retainBlock call has the copy_on_escape metadata
+/// and its block pointer argument does not escape (following the rules of
+/// block escaping), strength reduce the objc_retainBlock to an objc_retain.
+///
+/// TODO: If an objc_retainBlock call is dominated by a previous
+/// objc_retainBlock call, strength reduce the objc_retainBlock to an
+/// objc_retain.
+bool
+ObjCARCOpt::OptimizeRetainBlockCall(Function &F, Instruction *Inst,
+                                    InstructionClass &Class) {
+  assert(GetBasicInstructionClass(Inst) == Class);
+  assert(IC_RetainBlock == Class);
+
+  // If we cannot optimize Inst, return false.
+  if (!IsRetainBlockOptimizable(Inst))
+    return false;
+
+  CallInst *RetainBlock = cast<CallInst>(Inst);
+  RetainBlock->setCalledFunction(getRetainCallee(F.getParent()));
+  // Remove copy_on_escape metadata.
+  RetainBlock->setMetadata(CopyOnEscapeMDKind, 0);
+  Class = IC_Retain;
+
+  return true;
+}
+
 /// Visit each call, one at a time, and make simplifications without doing any
 /// additional analysis.
 void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
@@ -1402,6 +1433,12 @@ void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
       }
       break;
     }
+    case IC_RetainBlock:
+      // If we strength reduce an objc_retainBlock to an objc_retain, continue
+      // on to the objc_retain peephole optimizations. Otherwise break.
+      if (!OptimizeRetainBlockCall(F, Inst, Class))
+        break;
+      // FALLTHROUGH
     case IC_Retain:
       OptimizeRetainCall(F, Inst);
       break;
@@ -1748,11 +1785,10 @@ ObjCARCOpt::VisitInstructionBottomUp(Instruction *Inst,
     break;
   }
   case IC_RetainBlock:
-    // An objc_retainBlock call with just a use may need to be kept,
-    // because it may be copying a block from the stack to the heap.
-    if (!IsRetainBlockOptimizable(Inst))
-      break;
-    // FALLTHROUGH
+    // In OptimizeIndividualCalls, we have strength reduced all optimizable
+    // objc_retainBlocks to objc_retains. Thus at this point any
+    // objc_retainBlocks that we see are not optimizable.
+    break;
   case IC_Retain:
   case IC_RetainRV: {
     Arg = GetObjCArg(Inst);
@@ -1961,11 +1997,10 @@ ObjCARCOpt::VisitInstructionTopDown(Instruction *Inst,
 
   switch (Class) {
   case IC_RetainBlock:
-    // An objc_retainBlock call with just a use may need to be kept,
-    // because it may be copying a block from the stack to the heap.
-    if (!IsRetainBlockOptimizable(Inst))
-      break;
-    // FALLTHROUGH
+    // In OptimizeIndividualCalls, we have strength reduced all optimizable
+    // objc_retainBlocks to objc_retains. Thus at this point any
+    // objc_retainBlocks that we see are not optimizable.
+    break;
   case IC_Retain:
   case IC_RetainRV: {
     Arg = GetObjCArg(Inst);
@@ -795,10 +795,10 @@ entry:
   ret void
 }
 
-; Don't optimize objc_retainBlock.
+; Don't optimize objc_retainBlock, but do strength reduce it.
 
 ; CHECK: define void @test23b
-; CHECK: @objc_retainBlock
+; CHECK: @objc_retain
 ; CHECK: @objc_release
 ; CHECK: }
 define void @test23b(i8* %p) {
@@ -59,11 +59,12 @@ lpad:                                             ; preds = %entry
   resume { i8*, i32 } %t8
 }
 
-; There is no !clang.arc.no_objc_arc_exceptions
-; metadata here, so the optimizer shouldn't eliminate anything.
+; There is no !clang.arc.no_objc_arc_exceptions metadata here, so the optimizer
+; shouldn't eliminate anything, but *CAN* strength reduce the objc_retainBlock
+; to an objc_retain.
 
 ; CHECK: define void @test0_no_metadata(
-; CHECK: call i8* @objc_retainBlock(
+; CHECK: call i8* @objc_retain(
 ; CHECK: invoke
 ; CHECK: call void @objc_release(
 ; CHECK: }