forked from OSchip/llvm-project
Update ARM and x86 ArchHandler to match 64bits counterparts. NFC
Summary: Define an explicit type for arch-specific reference kinds and use it in switch statements to make the compiler emit warnings if some case is not covered. It will help to catch such errors when we add new mach-o reference kinds. Reviewers: shankarke, kledzik Reviewed By: shankarke Subscribers: shankarke, aemerson, llvm-commits Projects: #lld Differential Revision: http://reviews.llvm.org/D7612 llvm-svn: 229246
This commit is contained in:
parent
f9dd7edd49
commit
8860b8d70c
|
@ -138,7 +138,7 @@ private:
|
|||
static const Registry::KindStrings _sKindStrings[];
|
||||
static const StubInfo _sStubInfoArmPIC;
|
||||
|
||||
enum : Reference::KindValue {
|
||||
enum Arm_Kinds : Reference::KindValue {
|
||||
invalid, /// for error condition
|
||||
|
||||
modeThumbCode, /// Content starting at this offset is thumb.
|
||||
|
@ -207,6 +207,7 @@ ArchHandler_arm::ArchHandler_arm() { }
|
|||
ArchHandler_arm::~ArchHandler_arm() { }
|
||||
|
||||
const Registry::KindStrings ArchHandler_arm::_sKindStrings[] = {
|
||||
LLD_KIND_STRING_ENTRY(invalid),
|
||||
LLD_KIND_STRING_ENTRY(modeThumbCode),
|
||||
LLD_KIND_STRING_ENTRY(modeArmCode),
|
||||
LLD_KIND_STRING_ENTRY(modeData),
|
||||
|
@ -912,7 +913,7 @@ void ArchHandler_arm::applyFixupFinal(const Reference &ref, uint8_t *loc,
|
|||
int32_t displacement;
|
||||
uint16_t value16;
|
||||
uint32_t value32;
|
||||
switch (ref.kindValue()) {
|
||||
switch (static_cast<Arm_Kinds>(ref.kindValue())) {
|
||||
case modeThumbCode:
|
||||
thumbMode = true;
|
||||
break;
|
||||
|
@ -1074,15 +1075,15 @@ void ArchHandler_arm::applyFixupRelocatable(const Reference &ref, uint8_t *loc,
|
|||
uint16_t value16;
|
||||
uint32_t value32;
|
||||
bool targetIsUndef = isa<UndefinedAtom>(ref.target());
|
||||
switch (ref.kindValue()) {
|
||||
switch (static_cast<Arm_Kinds>(ref.kindValue())) {
|
||||
case modeThumbCode:
|
||||
thumbMode = true;
|
||||
break;
|
||||
return;
|
||||
case modeArmCode:
|
||||
thumbMode = false;
|
||||
break;
|
||||
return;
|
||||
case modeData:
|
||||
break;
|
||||
return;
|
||||
case thumb_b22:
|
||||
case thumb_bl22:
|
||||
assert(thumbMode);
|
||||
|
@ -1094,7 +1095,7 @@ void ArchHandler_arm::applyFixupRelocatable(const Reference &ref, uint8_t *loc,
|
|||
displacement,
|
||||
targetIsUndef || targetIsThumb);
|
||||
*loc32 = value32;
|
||||
break;
|
||||
return;
|
||||
case thumb_movw:
|
||||
assert(thumbMode);
|
||||
if (useExternalReloc)
|
||||
|
@ -1102,7 +1103,7 @@ void ArchHandler_arm::applyFixupRelocatable(const Reference &ref, uint8_t *loc,
|
|||
else
|
||||
value16 = (targetAddress + ref.addend()) & 0xFFFF;
|
||||
*loc32 = setWordFromThumbMov(*loc32, value16);
|
||||
break;
|
||||
return;
|
||||
case thumb_movt:
|
||||
assert(thumbMode);
|
||||
if (useExternalReloc)
|
||||
|
@ -1110,17 +1111,17 @@ void ArchHandler_arm::applyFixupRelocatable(const Reference &ref, uint8_t *loc,
|
|||
else
|
||||
value16 = (targetAddress + ref.addend()) >> 16;
|
||||
*loc32 = setWordFromThumbMov(*loc32, value16);
|
||||
break;
|
||||
return;
|
||||
case thumb_movw_funcRel:
|
||||
assert(thumbMode);
|
||||
value16 = (targetAddress - inAtomAddress + ref.addend()) & 0xFFFF;
|
||||
*loc32 = setWordFromThumbMov(*loc32, value16);
|
||||
break;
|
||||
return;
|
||||
case thumb_movt_funcRel:
|
||||
assert(thumbMode);
|
||||
value16 = (targetAddress - inAtomAddress + ref.addend()) >> 16;
|
||||
*loc32 = setWordFromThumbMov(*loc32, value16);
|
||||
break;
|
||||
return;
|
||||
case arm_b24:
|
||||
case arm_bl24:
|
||||
assert(!thumbMode);
|
||||
|
@ -1131,7 +1132,7 @@ void ArchHandler_arm::applyFixupRelocatable(const Reference &ref, uint8_t *loc,
|
|||
value32 = setDisplacementInArmBranch(*loc32, displacement,
|
||||
targetIsThumb);
|
||||
*loc32 = value32;
|
||||
break;
|
||||
return;
|
||||
case arm_movw:
|
||||
assert(!thumbMode);
|
||||
if (useExternalReloc)
|
||||
|
@ -1139,7 +1140,7 @@ void ArchHandler_arm::applyFixupRelocatable(const Reference &ref, uint8_t *loc,
|
|||
else
|
||||
value16 = (targetAddress + ref.addend()) & 0xFFFF;
|
||||
*loc32 = setWordFromArmMov(*loc32, value16);
|
||||
break;
|
||||
return;
|
||||
case arm_movt:
|
||||
assert(!thumbMode);
|
||||
if (useExternalReloc)
|
||||
|
@ -1147,31 +1148,32 @@ void ArchHandler_arm::applyFixupRelocatable(const Reference &ref, uint8_t *loc,
|
|||
else
|
||||
value16 = (targetAddress + ref.addend()) >> 16;
|
||||
*loc32 = setWordFromArmMov(*loc32, value16);
|
||||
break;
|
||||
return;
|
||||
case arm_movw_funcRel:
|
||||
assert(!thumbMode);
|
||||
value16 = (targetAddress - inAtomAddress + ref.addend()) & 0xFFFF;
|
||||
*loc32 = setWordFromArmMov(*loc32, value16);
|
||||
break;
|
||||
return;
|
||||
case arm_movt_funcRel:
|
||||
assert(!thumbMode);
|
||||
value16 = (targetAddress - inAtomAddress + ref.addend()) >> 16;
|
||||
*loc32 = setWordFromArmMov(*loc32, value16);
|
||||
break;
|
||||
return;
|
||||
case pointer32:
|
||||
*loc32 = targetAddress + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case delta32:
|
||||
*loc32 = targetAddress - fixupAddress + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case lazyPointer:
|
||||
case lazyImmediateLocation:
|
||||
// do nothing
|
||||
break;
|
||||
default:
|
||||
llvm_unreachable("invalid ARM Reference Kind");
|
||||
return;
|
||||
case invalid:
|
||||
// Fall into llvm_unreachable().
|
||||
break;
|
||||
}
|
||||
llvm_unreachable("invalid ARM Reference Kind");
|
||||
}
|
||||
|
||||
void ArchHandler_arm::appendSectionRelocations(
|
||||
|
@ -1190,13 +1192,12 @@ void ArchHandler_arm::appendSectionRelocations(
|
|||
uint32_t targetAtomAddress;
|
||||
uint32_t fromAtomAddress;
|
||||
uint16_t other16;
|
||||
switch (ref.kindValue()) {
|
||||
switch (static_cast<Arm_Kinds>(ref.kindValue())) {
|
||||
case modeThumbCode:
|
||||
case modeArmCode:
|
||||
case modeData:
|
||||
break;
|
||||
// Do nothing.
|
||||
break;
|
||||
return;
|
||||
case thumb_b22:
|
||||
case thumb_bl22:
|
||||
if (useExternalReloc) {
|
||||
|
@ -1210,7 +1211,7 @@ void ArchHandler_arm::appendSectionRelocations(
|
|||
appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()),0,
|
||||
ARM_THUMB_RELOC_BR22 | rPcRel | rLength4);
|
||||
}
|
||||
break;
|
||||
return;
|
||||
case thumb_movw:
|
||||
if (useExternalReloc) {
|
||||
other16 = ref.addend() >> 16;
|
||||
|
@ -1234,7 +1235,7 @@ void ArchHandler_arm::appendSectionRelocations(
|
|||
ARM_RELOC_PAIR | rLenThmbLo);
|
||||
}
|
||||
}
|
||||
break;
|
||||
return;
|
||||
case thumb_movt:
|
||||
if (useExternalReloc) {
|
||||
other16 = ref.addend() & 0xFFFF;
|
||||
|
@ -1258,7 +1259,7 @@ void ArchHandler_arm::appendSectionRelocations(
|
|||
ARM_RELOC_PAIR | rLenThmbHi);
|
||||
}
|
||||
}
|
||||
break;
|
||||
return;
|
||||
case thumb_movw_funcRel:
|
||||
fromAtomAddress = addressForAtom(atom);
|
||||
targetAtomAddress = addressForAtom(*ref.target());
|
||||
|
@ -1267,7 +1268,7 @@ void ArchHandler_arm::appendSectionRelocations(
|
|||
ARM_RELOC_HALF_SECTDIFF | rScattered | rLenThmbLo);
|
||||
appendReloc(relocs, other16, 0, fromAtomAddress,
|
||||
ARM_RELOC_PAIR | rScattered | rLenThmbLo);
|
||||
break;
|
||||
return;
|
||||
case thumb_movt_funcRel:
|
||||
fromAtomAddress = addressForAtom(atom);
|
||||
targetAtomAddress = addressForAtom(*ref.target());
|
||||
|
@ -1276,7 +1277,7 @@ void ArchHandler_arm::appendSectionRelocations(
|
|||
ARM_RELOC_HALF_SECTDIFF | rScattered | rLenThmbHi);
|
||||
appendReloc(relocs, other16, 0, fromAtomAddress,
|
||||
ARM_RELOC_PAIR | rScattered | rLenThmbHi);
|
||||
break;
|
||||
return;
|
||||
case arm_b24:
|
||||
case arm_bl24:
|
||||
if (useExternalReloc) {
|
||||
|
@ -1290,7 +1291,7 @@ void ArchHandler_arm::appendSectionRelocations(
|
|||
appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()),0,
|
||||
ARM_RELOC_BR24 | rPcRel | rLength4);
|
||||
}
|
||||
break;
|
||||
return;
|
||||
case arm_movw:
|
||||
if (useExternalReloc) {
|
||||
other16 = ref.addend() >> 16;
|
||||
|
@ -1314,7 +1315,7 @@ void ArchHandler_arm::appendSectionRelocations(
|
|||
ARM_RELOC_PAIR | rLenArmLo);
|
||||
}
|
||||
}
|
||||
break;
|
||||
return;
|
||||
case arm_movt:
|
||||
if (useExternalReloc) {
|
||||
other16 = ref.addend() & 0xFFFF;
|
||||
|
@ -1338,7 +1339,7 @@ void ArchHandler_arm::appendSectionRelocations(
|
|||
ARM_RELOC_PAIR | rLenArmHi);
|
||||
}
|
||||
}
|
||||
break;
|
||||
return;
|
||||
case arm_movw_funcRel:
|
||||
fromAtomAddress = addressForAtom(atom);
|
||||
targetAtomAddress = addressForAtom(*ref.target());
|
||||
|
@ -1347,7 +1348,7 @@ void ArchHandler_arm::appendSectionRelocations(
|
|||
ARM_RELOC_HALF_SECTDIFF | rScattered | rLenArmLo);
|
||||
appendReloc(relocs, other16, 0, fromAtomAddress,
|
||||
ARM_RELOC_PAIR | rScattered | rLenArmLo);
|
||||
break;
|
||||
return;
|
||||
case arm_movt_funcRel:
|
||||
fromAtomAddress = addressForAtom(atom);
|
||||
targetAtomAddress = addressForAtom(*ref.target());
|
||||
|
@ -1356,7 +1357,7 @@ void ArchHandler_arm::appendSectionRelocations(
|
|||
ARM_RELOC_HALF_SECTDIFF | rScattered | rLenArmHi);
|
||||
appendReloc(relocs, other16, 0, fromAtomAddress,
|
||||
ARM_RELOC_PAIR | rScattered | rLenArmHi);
|
||||
break;
|
||||
return;
|
||||
case pointer32:
|
||||
if (useExternalReloc) {
|
||||
appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
|
||||
|
@ -1370,22 +1371,23 @@ void ArchHandler_arm::appendSectionRelocations(
|
|||
appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()),0,
|
||||
ARM_RELOC_VANILLA | rLength4);
|
||||
}
|
||||
break;
|
||||
return;
|
||||
case delta32:
|
||||
appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
|
||||
ARM_RELOC_SECTDIFF | rScattered | rLength4);
|
||||
appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) +
|
||||
ref.offsetInAtom(),
|
||||
ARM_RELOC_PAIR | rScattered | rLength4);
|
||||
break;
|
||||
return;
|
||||
case lazyPointer:
|
||||
case lazyImmediateLocation:
|
||||
// do nothing
|
||||
break;
|
||||
default:
|
||||
llvm_unreachable("invalid ARM Reference Kind");
|
||||
return;
|
||||
case invalid:
|
||||
// Fall into llvm_unreachable().
|
||||
break;
|
||||
}
|
||||
llvm_unreachable("invalid ARM Reference Kind");
|
||||
}
|
||||
|
||||
void ArchHandler_arm::addAdditionalReferences(MachODefinedAtom &atom) {
|
||||
|
|
|
@ -133,7 +133,7 @@ private:
|
|||
static const Registry::KindStrings _sKindStrings[];
|
||||
static const StubInfo _sStubInfo;
|
||||
|
||||
enum : Reference::KindValue {
|
||||
enum X86_Kinds : Reference::KindValue {
|
||||
invalid, /// for error condition
|
||||
|
||||
modeCode, /// Content starting at this offset is code.
|
||||
|
@ -441,38 +441,39 @@ void ArchHandler_x86::applyFixupFinal(const Reference &ref, uint8_t *loc,
|
|||
return;
|
||||
assert(ref.kindArch() == Reference::KindArch::x86);
|
||||
ulittle32_t *loc32 = reinterpret_cast<ulittle32_t *>(loc);
|
||||
switch (ref.kindValue()) {
|
||||
switch (static_cast<X86_Kinds>(ref.kindValue())) {
|
||||
case branch32:
|
||||
*loc32 = (targetAddress - (fixupAddress + 4)) + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case branch16:
|
||||
*loc32 = (targetAddress - (fixupAddress + 2)) + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case pointer32:
|
||||
case abs32:
|
||||
*loc32 = targetAddress + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case funcRel32:
|
||||
*loc32 = targetAddress - inAtomAddress + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case delta32:
|
||||
*loc32 = targetAddress - fixupAddress + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case negDelta32:
|
||||
*loc32 = fixupAddress - targetAddress + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case modeCode:
|
||||
case modeData:
|
||||
case lazyPointer:
|
||||
// do nothing
|
||||
break;
|
||||
return;
|
||||
case lazyImmediateLocation:
|
||||
*loc32 = ref.addend();
|
||||
break;
|
||||
default:
|
||||
llvm_unreachable("invalid x86 Reference Kind");
|
||||
return;
|
||||
case invalid:
|
||||
// Fall into llvm_unreachable().
|
||||
break;
|
||||
}
|
||||
llvm_unreachable("invalid x86 Reference Kind");
|
||||
}
|
||||
|
||||
void ArchHandler_x86::applyFixupRelocatable(const Reference &ref,
|
||||
|
@ -483,42 +484,43 @@ void ArchHandler_x86::applyFixupRelocatable(const Reference &ref,
|
|||
bool useExternalReloc = useExternalRelocationTo(*ref.target());
|
||||
ulittle16_t *loc16 = reinterpret_cast<ulittle16_t *>(loc);
|
||||
ulittle32_t *loc32 = reinterpret_cast<ulittle32_t *>(loc);
|
||||
switch (ref.kindValue()) {
|
||||
switch (static_cast<X86_Kinds>(ref.kindValue())) {
|
||||
case branch32:
|
||||
if (useExternalReloc)
|
||||
*loc32 = ref.addend() - (fixupAddress + 4);
|
||||
else
|
||||
*loc32 =(targetAddress - (fixupAddress+4)) + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case branch16:
|
||||
if (useExternalReloc)
|
||||
*loc16 = ref.addend() - (fixupAddress + 2);
|
||||
else
|
||||
*loc16 = (targetAddress - (fixupAddress+2)) + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case pointer32:
|
||||
case abs32:
|
||||
*loc32 = targetAddress + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case funcRel32:
|
||||
*loc32 = targetAddress - inAtomAddress + ref.addend(); // FIXME
|
||||
break;
|
||||
return;
|
||||
case delta32:
|
||||
*loc32 = targetAddress - fixupAddress + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case negDelta32:
|
||||
*loc32 = fixupAddress - targetAddress + ref.addend();
|
||||
break;
|
||||
return;
|
||||
case modeCode:
|
||||
case modeData:
|
||||
case lazyPointer:
|
||||
case lazyImmediateLocation:
|
||||
// do nothing
|
||||
break;
|
||||
default:
|
||||
llvm_unreachable("invalid x86 Reference Kind");
|
||||
return;
|
||||
case invalid:
|
||||
// Fall into llvm_unreachable().
|
||||
break;
|
||||
}
|
||||
llvm_unreachable("invalid x86 Reference Kind");
|
||||
}
|
||||
|
||||
bool ArchHandler_x86::useExternalRelocationTo(const Atom &target) {
|
||||
|
@ -556,7 +558,7 @@ void ArchHandler_x86::appendSectionRelocations(
|
|||
assert(ref.kindArch() == Reference::KindArch::x86);
|
||||
uint32_t sectionOffset = atomSectionOffset + ref.offsetInAtom();
|
||||
bool useExternalReloc = useExternalRelocationTo(*ref.target());
|
||||
switch (ref.kindValue()) {
|
||||
switch (static_cast<X86_Kinds>(ref.kindValue())) {
|
||||
case modeCode:
|
||||
case modeData:
|
||||
break;
|
||||
|
@ -572,7 +574,7 @@ void ArchHandler_x86::appendSectionRelocations(
|
|||
appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()),0,
|
||||
GENERIC_RELOC_VANILLA | rPcRel | rLength4);
|
||||
}
|
||||
break;
|
||||
return;
|
||||
case branch16:
|
||||
if (useExternalReloc) {
|
||||
appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
|
||||
|
@ -585,7 +587,7 @@ void ArchHandler_x86::appendSectionRelocations(
|
|||
appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()),0,
|
||||
GENERIC_RELOC_VANILLA | rPcRel | rLength2);
|
||||
}
|
||||
break;
|
||||
return;
|
||||
case pointer32:
|
||||
case abs32:
|
||||
if (useExternalReloc)
|
||||
|
@ -599,36 +601,36 @@ void ArchHandler_x86::appendSectionRelocations(
|
|||
appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
|
||||
GENERIC_RELOC_VANILLA | rLength4);
|
||||
}
|
||||
break;
|
||||
return;
|
||||
case funcRel32:
|
||||
appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
|
||||
GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
|
||||
appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) - ref.addend(),
|
||||
GENERIC_RELOC_PAIR | rScattered | rLength4);
|
||||
break;
|
||||
return;
|
||||
case delta32:
|
||||
appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
|
||||
GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
|
||||
appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) +
|
||||
ref.offsetInAtom(),
|
||||
GENERIC_RELOC_PAIR | rScattered | rLength4);
|
||||
break;
|
||||
return;
|
||||
case negDelta32:
|
||||
appendReloc(relocs, sectionOffset, 0, addressForAtom(atom) +
|
||||
ref.offsetInAtom(),
|
||||
GENERIC_RELOC_SECTDIFF | rScattered | rLength4);
|
||||
appendReloc(relocs, sectionOffset, 0, addressForAtom(*ref.target()),
|
||||
GENERIC_RELOC_PAIR | rScattered | rLength4);
|
||||
break;
|
||||
return;
|
||||
case lazyPointer:
|
||||
case lazyImmediateLocation:
|
||||
llvm_unreachable("lazy reference kind implies Stubs pass was run");
|
||||
return;
|
||||
case invalid:
|
||||
// Fall into llvm_unreachable().
|
||||
break;
|
||||
default:
|
||||
llvm_unreachable("unknown x86 Reference Kind");
|
||||
break;
|
||||
|
||||
}
|
||||
llvm_unreachable("unknown x86 Reference Kind");
|
||||
}
|
||||
|
||||
|
||||
|
|
Loading…
Reference in New Issue