| 1 | //===------------------------------------------------------------*- C++ -*-===// |
| 2 | // |
| 3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
| 4 | // See https://llvm.org/LICENSE.txt for license information. |
| 5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
| 6 | // |
| 7 | //===----------------------------------------------------------------------===// |
| 8 | |
| 9 | #ifndef LLD_ELF_RELOCSCAN_H |
| 10 | #define LLD_ELF_RELOCSCAN_H |
| 11 | |
| 12 | #include "Config.h" |
| 13 | #include "InputFiles.h" |
| 14 | #include "InputSection.h" |
| 15 | #include "Relocations.h" |
| 16 | #include "SyntheticSections.h" |
| 17 | #include "Target.h" |
| 18 | |
| 19 | using namespace llvm; |
| 20 | using namespace llvm::ELF; |
| 21 | using namespace llvm::object; |
| 22 | |
| 23 | namespace lld::elf { |
| 24 | |
// Build a 64-bit mask with one bit set for each value in `exprs` that lies in
// [0, 64); values outside that range contribute no bits. With no arguments the
// result is 0. Implemented as a C++17 fold expression instead of the usual
// recursive-variadic pair; accepts any integral/unscoped-enum arguments (e.g.
// RelExpr, possibly shifted by -64 by the caller).
template <typename... Ts>
inline constexpr uint64_t buildMask(Ts... exprs) {
  return ((0 <= int(exprs) && int(exprs) < 64 ? uint64_t(1) << int(exprs)
                                              : uint64_t(0)) |
          ... | uint64_t(0));
}
| 33 | |
| 34 | // Return true if `Expr` is one of `Exprs`. |
| 35 | // There are more than 64 but less than 128 RelExprs, so we divide the set of |
| 36 | // exprs into [0, 64) and [64, 128) and represent each range as a constant |
| 37 | // 64-bit mask. Then we decide which mask to test depending on the value of |
| 38 | // expr and use a simple shift and bitwise-and to test for membership. |
| 39 | template <RelExpr... Exprs> bool oneof(RelExpr expr) { |
| 40 | assert(0 <= expr && (int)expr < 128 && |
| 41 | "RelExpr is too large for 128-bit mask!" ); |
| 42 | |
| 43 | if (expr >= 64) |
| 44 | return (uint64_t(1) << (expr - 64)) & buildMask((Exprs - 64)...); |
| 45 | return (uint64_t(1) << expr) & buildMask(Exprs...); |
| 46 | } |
| 47 | |
| 48 | // This class encapsulates states needed to scan relocations for one |
| 49 | // InputSectionBase. |
| 50 | class RelocScan { |
| 51 | public: |
| 52 | Ctx &ctx; |
| 53 | InputSectionBase *sec; |
| 54 | |
| 55 | RelocScan(Ctx &ctx, InputSectionBase *sec = nullptr) : ctx(ctx), sec(sec) {} |
| 56 | template <class ELFT, class RelTy> |
| 57 | void scan(typename Relocs<RelTy>::const_iterator &i, RelType type, |
| 58 | int64_t addend); |
| 59 | void scanEhSection(EhInputSection &s); |
| 60 | |
| 61 | template <class ELFT, class RelTy> |
| 62 | int64_t getAddend(const RelTy &r, RelType type); |
| 63 | bool maybeReportUndefined(Undefined &sym, uint64_t offset); |
| 64 | bool checkTlsLe(uint64_t offset, Symbol &sym, RelType type); |
| 65 | bool isStaticLinkTimeConstant(RelExpr e, RelType type, const Symbol &sym, |
| 66 | uint64_t relOff) const; |
| 67 | void process(RelExpr expr, RelType type, uint64_t offset, Symbol &sym, |
| 68 | int64_t addend) const; |
| 69 | // Process relocation after needsGot/needsPlt flags are already handled. |
| 70 | void processAux(RelExpr expr, RelType type, uint64_t offset, Symbol &sym, |
| 71 | int64_t addend) const; |
| 72 | unsigned handleTlsRelocation(RelExpr expr, RelType type, uint64_t offset, |
| 73 | Symbol &sym, int64_t addend); |
| 74 | |
| 75 | // Process R_PC relocations. These are the most common relocation type, so we |
| 76 | // inline the isStaticLinkTimeConstant check. |
| 77 | void processR_PC(RelType type, uint64_t offset, int64_t addend, Symbol &sym) { |
| 78 | if (LLVM_UNLIKELY(sym.isGnuIFunc())) |
| 79 | sym.setFlags(HAS_DIRECT_RELOC); |
| 80 | if (sym.isPreemptible || (isAbsolute(sym) && ctx.arg.isPic)) |
| 81 | processAux(expr: R_PC, type, offset, sym, addend); |
| 82 | else |
| 83 | sec->addReloc(r: {.expr: R_PC, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 84 | } |
| 85 | |
| 86 | // Process R_PLT_PC relocations. These are very common (calls), so we inline |
| 87 | // the isStaticLinkTimeConstant check. Non-preemptible symbols are optimized |
| 88 | // to R_PC (direct call). |
| 89 | void processR_PLT_PC(RelType type, uint64_t offset, int64_t addend, |
| 90 | Symbol &sym) { |
| 91 | if (LLVM_UNLIKELY(sym.isGnuIFunc())) { |
| 92 | process(expr: R_PLT_PC, type, offset, sym, addend); |
| 93 | return; |
| 94 | } |
| 95 | if (sym.isPreemptible) { |
| 96 | sym.setFlags(NEEDS_PLT); |
| 97 | sec->addReloc(r: {.expr: R_PLT_PC, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 98 | } else if (!(isAbsolute(sym) && ctx.arg.isPic)) { |
| 99 | sec->addReloc(r: {.expr: R_PC, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 100 | } else { |
| 101 | processAux(expr: R_PC, type, offset, sym, addend); |
| 102 | } |
| 103 | } |
| 104 | |
| 105 | // Handle TLS Initial-Exec relocation. |
| 106 | template <bool enableIeToLe = true> |
| 107 | void handleTlsIe(RelExpr ieExpr, RelType type, uint64_t offset, |
| 108 | int64_t addend, Symbol &sym) { |
| 109 | if (enableIeToLe && !ctx.arg.shared && !sym.isPreemptible) { |
| 110 | // Optimize to Local Exec. |
| 111 | sec->addReloc(r: {.expr: R_TPREL, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 112 | } else { |
| 113 | sym.setFlags(NEEDS_TLSIE); |
| 114 | // R_GOT (absolute GOT address) needs a RELATIVE dynamic relocation in |
| 115 | // PIC when the relocation uses the full address (not just low page bits). |
| 116 | if (ieExpr == R_GOT && ctx.arg.isPic && |
| 117 | !ctx.target->usesOnlyLowPageBits(type)) |
| 118 | sec->getPartition(ctx).relaDyn->addRelativeReloc( |
| 119 | dynType: ctx.target->relativeRel, isec&: *sec, offsetInSec: offset, sym, addend, addendRelType: type, expr: ieExpr); |
| 120 | else |
| 121 | sec->addReloc(r: {.expr: ieExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 122 | } |
| 123 | } |
| 124 | |
| 125 | // Handle TLS Local-Dynamic relocation. Returns true if the __tls_get_addr |
| 126 | // call should be skipped (i.e., caller should ++it). |
| 127 | bool handleTlsLd(RelExpr sharedExpr, RelType type, uint64_t offset, |
| 128 | int64_t addend, Symbol &sym) { |
| 129 | if (ctx.arg.shared) { |
| 130 | ctx.needsTlsLd.store(i: true, m: std::memory_order_relaxed); |
| 131 | sec->addReloc(r: {.expr: sharedExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 132 | return false; |
| 133 | } |
| 134 | // Optimize to Local Exec. |
| 135 | sec->addReloc(r: {.expr: R_TPREL, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 136 | return true; |
| 137 | } |
| 138 | |
| 139 | // Handle TLS General-Dynamic relocation. Returns true if the __tls_get_addr |
| 140 | // call should be skipped (i.e., caller should ++it). |
| 141 | bool handleTlsGd(RelExpr sharedExpr, RelExpr ieExpr, RelExpr leExpr, |
| 142 | RelType type, uint64_t offset, int64_t addend, Symbol &sym) { |
| 143 | if (ctx.arg.shared) { |
| 144 | sym.setFlags(NEEDS_TLSGD); |
| 145 | sec->addReloc(r: {.expr: sharedExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 146 | return false; |
| 147 | } |
| 148 | if (sym.isPreemptible) { |
| 149 | // Optimize to Initial Exec. |
| 150 | sym.setFlags(NEEDS_TLSIE); |
| 151 | sec->addReloc(r: {.expr: ieExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 152 | } else { |
| 153 | // Optimize to Local Exec. |
| 154 | sec->addReloc(r: {.expr: leExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 155 | } |
| 156 | return true; |
| 157 | } |
| 158 | |
| 159 | // Handle TLSDESC relocation. |
| 160 | void handleTlsDesc(RelExpr sharedExpr, RelExpr ieExpr, RelType type, |
| 161 | uint64_t offset, int64_t addend, Symbol &sym) { |
| 162 | if (ctx.arg.shared) { |
| 163 | // NEEDS_TLSDESC_NONAUTH is a no-op for non-AArch64 targets and detects |
| 164 | // incompatibility with NEEDS_TLSDESC_AUTH. |
| 165 | sym.setFlags(NEEDS_TLSDESC | NEEDS_TLSDESC_NONAUTH); |
| 166 | sec->addReloc(r: {.expr: sharedExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 167 | } else if (sym.isPreemptible) { |
| 168 | // Optimize to Initial Exec. |
| 169 | sym.setFlags(NEEDS_TLSIE); |
| 170 | sec->addReloc(r: {.expr: ieExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 171 | } else { |
| 172 | // Optimize to Local Exec. |
| 173 | sec->addReloc(r: {.expr: R_TPREL, .type: type, .offset: offset, .addend: addend, .sym: &sym}); |
| 174 | } |
| 175 | } |
| 176 | }; |
| 177 | |
| 178 | template <class ELFT, class RelTy> |
| 179 | int64_t RelocScan::getAddend(const RelTy &r, RelType type) { |
| 180 | return RelTy::HasAddend ? elf::getAddend<ELFT>(r) |
| 181 | : ctx.target->getImplicitAddend( |
| 182 | buf: sec->content().data() + r.r_offset, type); |
| 183 | } |
| 184 | |
| 185 | template <class ELFT, class RelTy> |
| 186 | void RelocScan::scan(typename Relocs<RelTy>::const_iterator &it, RelType type, |
| 187 | int64_t addend) { |
| 188 | const RelTy &rel = *it; |
| 189 | uint32_t symIdx = rel.getSymbol(false); |
| 190 | Symbol &sym = sec->getFile<ELFT>()->getSymbol(symIdx); |
| 191 | uint64_t offset = rel.r_offset; |
| 192 | RelExpr expr = |
| 193 | ctx.target->getRelExpr(type, s: sym, loc: sec->content().data() + offset); |
| 194 | |
| 195 | // Ignore R_*_NONE and other marker relocations. |
| 196 | if (expr == R_NONE) |
| 197 | return; |
| 198 | |
| 199 | // Error if the target symbol is undefined. Symbol index 0 may be used by |
| 200 | // marker relocations, e.g. R_*_NONE and R_ARM_V4BX. Don't error on them. |
| 201 | if (sym.isUndefined() && symIdx != 0 && |
| 202 | maybeReportUndefined(sym&: cast<Undefined>(Val&: sym), offset)) |
| 203 | return; |
| 204 | |
| 205 | // Ensure GOT or GOTPLT is created for relocations that reference their base |
| 206 | // addresses without directly creating entries. |
| 207 | if (oneof<R_GOTPLTREL, R_GOTPLT, R_TLSGD_GOTPLT>(expr)) { |
| 208 | ctx.in.gotPlt->hasGotPltOffRel.store(i: true, m: std::memory_order_relaxed); |
| 209 | } else if (oneof<R_GOTONLY_PC, R_GOTREL, RE_PPC32_PLTREL>(expr)) { |
| 210 | ctx.in.got->hasGotOffRel.store(i: true, m: std::memory_order_relaxed); |
| 211 | } |
| 212 | |
| 213 | // Process TLS relocations, including TLS optimizations. Note that |
| 214 | // R_TPREL and R_TPREL_NEG relocations are resolved in processAux. |
| 215 | // |
| 216 | // Some RISCV TLSDESC relocations reference a local NOTYPE symbol, |
| 217 | // but we need to process them in handleTlsRelocation. |
| 218 | if (sym.isTls() || oneof<R_TLSDESC_PC, R_TLSDESC_CALL>(expr)) { |
| 219 | if (unsigned processed = |
| 220 | handleTlsRelocation(expr, type, offset, sym, addend)) { |
| 221 | it += processed - 1; |
| 222 | return; |
| 223 | } |
| 224 | } |
| 225 | |
| 226 | process(expr, type, offset, sym, addend); |
| 227 | } |
| 228 | |
| 229 | // Dispatch to target-specific scanSectionImpl based on relocation format. |
| 230 | template <class Target, class ELFT> |
| 231 | void scanSection1(Target &target, InputSectionBase &sec) { |
| 232 | const RelsOrRelas<ELFT> rels = sec.template relsOrRelas<ELFT>(); |
| 233 | if (rels.areRelocsCrel()) |
| 234 | target.template scanSectionImpl<ELFT>(sec, rels.crels); |
| 235 | else if (rels.areRelocsRel()) |
| 236 | target.template scanSectionImpl<ELFT>(sec, rels.rels); |
| 237 | else |
| 238 | target.template scanSectionImpl<ELFT>(sec, rels.relas); |
| 239 | } |
| 240 | |
| 241 | } // namespace lld::elf |
| 242 | |
| 243 | #endif |
| 244 | |