[AArch64] Add BTI to CFI jumptables.

With branch protection (BTI) enabled, the indirect branch into a jump table entry must land on a BTI landing pad, so each AArch64 entry gains a leading "bti c" instruction and grows from 4 to 8 bytes.

Reviewed By: eugenis, tamas.petz

Differential Revision: https://reviews.llvm.org/D81251
This commit is contained in:
Daniel Kiss 2020-09-29 13:35:25 +02:00
parent e6f332ef1e
commit c5a4900e1a
2 changed files with 56 additions and 1 deletion

View File

@ -1205,6 +1205,7 @@ void LowerTypeTestsModule::verifyTypeMDNode(GlobalObject *GO, MDNode *Type) {
// Size in bytes of a single CFI jump table entry, per target architecture.
// x86/x86_64: "jmp <target>@plt" plus int3 padding.
static const unsigned kX86JumpTableEntrySize = 8;
// ARM/Thumb, and AArch64 without branch-target-enforcement: one branch.
static const unsigned kARMJumpTableEntrySize = 4;
// AArch64 with branch-target-enforcement: a "bti c" landing pad followed by
// the branch, doubling the entry size.
static const unsigned kARMBTIJumpTableEntrySize = 8;
unsigned LowerTypeTestsModule::getJumpTableEntrySize() {
switch (Arch) {
@ -1213,7 +1214,12 @@ unsigned LowerTypeTestsModule::getJumpTableEntrySize() {
return kX86JumpTableEntrySize;
case Triple::arm:
case Triple::thumb:
return kARMJumpTableEntrySize;
case Triple::aarch64:
if (const auto *BTE = mdconst::extract_or_null<ConstantInt>(
M.getModuleFlag("branch-target-enforcement")))
if (BTE->getZExtValue())
return kARMBTIJumpTableEntrySize;
return kARMJumpTableEntrySize;
default:
report_fatal_error("Unsupported architecture for jump tables");
@ -1232,7 +1238,13 @@ void LowerTypeTestsModule::createJumpTableEntry(
if (JumpTableArch == Triple::x86 || JumpTableArch == Triple::x86_64) {
AsmOS << "jmp ${" << ArgIndex << ":c}@plt\n";
AsmOS << "int3\nint3\nint3\n";
} else if (JumpTableArch == Triple::arm || JumpTableArch == Triple::aarch64) {
} else if (JumpTableArch == Triple::arm) {
AsmOS << "b $" << ArgIndex << "\n";
} else if (JumpTableArch == Triple::aarch64) {
if (const auto *BTE = mdconst::extract_or_null<ConstantInt>(
Dest->getParent()->getModuleFlag("branch-target-enforcement")))
if (BTE->getZExtValue())
AsmOS << "bti c\n";
AsmOS << "b $" << ArgIndex << "\n";
} else if (JumpTableArch == Triple::thumb) {
AsmOS << "b.w $" << ArgIndex << "\n";
@ -1394,6 +1406,10 @@ void LowerTypeTestsModule::createJumpTable(
// by Clang for -march=armv7.
F->addFnAttr("target-cpu", "cortex-a8");
}
if (JumpTableArch == Triple::aarch64) {
F->addFnAttr("branch-target-enforcement", "false");
F->addFnAttr("sign-return-address", "none");
}
// Make sure we don't emit .eh_frame for this function.
F->addFnAttr(Attribute::NoUnwind);

View File

@ -0,0 +1,39 @@
; Run the lowertypetests pass and check the AArch64 jump table it emits.
; RUN: opt -S -lowertypetests -mtriple=aarch64-unknown-linux-gnu < %s | FileCheck --check-prefixes=AARCH64 %s
; Test for the jump table generation with branch protection on AArch64
target datalayout = "e-p:64:64"
; Take the addresses of @f and @g so the pass must route them through a
; jump table rather than rewriting the type tests directly.
@0 = private unnamed_addr constant [2 x void (...)*] [void (...)* bitcast (void ()* @f to void (...)*), void (...)* bitcast (void ()* @g to void (...)*)], align 16
; @f is redirected to its slot in the generated jump table function.
; AARCH64: @f = alias void (), void ()* @[[JT:.*]]
define void @f() !type !0 {
ret void
}
define internal void @g() !type !0 {
ret void
}
!0 = !{i32 0, !"typeid1"}
declare i1 @llvm.type.test(i8* %ptr, metadata %bitset) nounwind readnone
; A type test over "typeid1" forces the pass to lower it for @f and @g.
define i1 @foo(i8* %p) {
%x = call i1 @llvm.type.test(i8* %p, metadata !"typeid1")
ret i1 %x
}
!llvm.module.flags = !{!1}
; Module flag that turns on branch-target-enforcement: each jump table
; entry must begin with a "bti c" landing pad and the table is align 8.
!1 = !{i32 4, !"branch-target-enforcement", i32 1}
; The jump table body is a single inline-asm string, so the per-entry
; "bti c"/branch pairs are matched on the same line with -SAME.
; AARCH64: define private void @[[JT]]() #[[ATTR:.*]] align 8 {
; AARCH64: bti c
; AARCH64-SAME: b $0
; AARCH64-SAME: bti c
; AARCH64-SAME: b $1
; The table function itself opts out of BTI/PAC codegen: the landing pads
; are emitted by hand in the asm string above.
; AARCH64: attributes #[[ATTR]] = { naked nounwind "branch-target-enforcement"="false" "sign-return-address"="none"