//===------------------------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLD_ELF_RELOCSCAN_H
#define LLD_ELF_RELOCSCAN_H

#include "Config.h"
#include "InputFiles.h"
#include "InputSection.h"
#include "Relocations.h"
#include "SyntheticSections.h"
#include "Target.h"

using namespace llvm;
using namespace llvm::ELF;
using namespace llvm::object;

namespace lld::elf {

// Build a bitmask with one bit set for each argument in the range [0, 64);
// arguments outside that range contribute no bits. An empty argument list
// yields 0. Used by oneof() to precompute RelExpr membership masks.
//
// Implemented as a single C++17 fold expression instead of a recursive
// overload pair; arguments may be ints or unscoped enum values (RelExpr).
template <typename... Ints>
inline constexpr uint64_t buildMask(Ints... values) {
  return ((0 <= values && values < 64 ? uint64_t(1) << values : uint64_t(0)) |
          ... | uint64_t(0));
}

34// Return true if `Expr` is one of `Exprs`.
35// There are more than 64 but less than 128 RelExprs, so we divide the set of
36// exprs into [0, 64) and [64, 128) and represent each range as a constant
37// 64-bit mask. Then we decide which mask to test depending on the value of
38// expr and use a simple shift and bitwise-and to test for membership.
39template <RelExpr... Exprs> bool oneof(RelExpr expr) {
40 assert(0 <= expr && (int)expr < 128 &&
41 "RelExpr is too large for 128-bit mask!");
42
43 if (expr >= 64)
44 return (uint64_t(1) << (expr - 64)) & buildMask((Exprs - 64)...);
45 return (uint64_t(1) << expr) & buildMask(Exprs...);
46}
47
48// This class encapsulates states needed to scan relocations for one
49// InputSectionBase.
50class RelocScan {
51public:
52 Ctx &ctx;
53 InputSectionBase *sec;
54
55 RelocScan(Ctx &ctx, InputSectionBase *sec = nullptr) : ctx(ctx), sec(sec) {}
56 template <class ELFT, class RelTy>
57 void scan(typename Relocs<RelTy>::const_iterator &i, RelType type,
58 int64_t addend);
59 void scanEhSection(EhInputSection &s);
60
61 template <class ELFT, class RelTy>
62 int64_t getAddend(const RelTy &r, RelType type);
63 bool maybeReportUndefined(Undefined &sym, uint64_t offset);
64 bool checkTlsLe(uint64_t offset, Symbol &sym, RelType type);
65 bool isStaticLinkTimeConstant(RelExpr e, RelType type, const Symbol &sym,
66 uint64_t relOff) const;
67 void process(RelExpr expr, RelType type, uint64_t offset, Symbol &sym,
68 int64_t addend) const;
69 // Process relocation after needsGot/needsPlt flags are already handled.
70 void processAux(RelExpr expr, RelType type, uint64_t offset, Symbol &sym,
71 int64_t addend) const;
72
73 // Process R_PC relocations. These are the most common relocation type, so we
74 // inline the isStaticLinkTimeConstant check.
75 void processR_PC(RelType type, uint64_t offset, int64_t addend, Symbol &sym) {
76 if (LLVM_UNLIKELY(sym.isGnuIFunc()))
77 sym.setFlags(HAS_DIRECT_RELOC);
78 if (sym.isPreemptible || (isAbsolute(sym) && ctx.arg.isPic))
79 processAux(expr: R_PC, type, offset, sym, addend);
80 else
81 sec->addReloc(r: {.expr: R_PC, .type: type, .offset: offset, .addend: addend, .sym: &sym});
82 }
83
84 // Process R_PLT_PC relocations. These are very common (calls), so we inline
85 // the isStaticLinkTimeConstant check. Non-preemptible symbols are optimized
86 // to R_PC (direct call).
87 void processR_PLT_PC(RelType type, uint64_t offset, int64_t addend,
88 Symbol &sym) {
89 if (LLVM_UNLIKELY(sym.isGnuIFunc())) {
90 process(expr: R_PLT_PC, type, offset, sym, addend);
91 return;
92 }
93 if (sym.isPreemptible) {
94 sym.setFlags(NEEDS_PLT);
95 sec->addReloc(r: {.expr: R_PLT_PC, .type: type, .offset: offset, .addend: addend, .sym: &sym});
96 } else if (!(isAbsolute(sym) && ctx.arg.isPic)) {
97 sec->addReloc(r: {.expr: R_PC, .type: type, .offset: offset, .addend: addend, .sym: &sym});
98 } else {
99 processAux(expr: R_PC, type, offset, sym, addend);
100 }
101 }
102
103 // Handle TLS Initial-Exec relocation.
104 template <bool enableIeToLe = true>
105 void handleTlsIe(RelExpr ieExpr, RelType type, uint64_t offset,
106 int64_t addend, Symbol &sym) {
107 if (enableIeToLe && !ctx.arg.shared && !sym.isPreemptible) {
108 // Optimize to Local Exec.
109 sec->addReloc(r: {.expr: R_TPREL, .type: type, .offset: offset, .addend: addend, .sym: &sym});
110 } else {
111 sym.setFlags(NEEDS_TLSIE);
112 // R_GOT (absolute GOT address) needs a RELATIVE dynamic relocation in
113 // PIC when the relocation uses the full address (not just low page bits).
114 if (ieExpr == R_GOT && ctx.arg.isPic &&
115 !ctx.target->usesOnlyLowPageBits(type))
116 sec->getPartition(ctx).relaDyn->addRelativeReloc(
117 dynType: ctx.target->relativeRel, isec&: *sec, offsetInSec: offset, sym, addend, addendRelType: type, expr: ieExpr);
118 else
119 sec->addReloc(r: {.expr: ieExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym});
120 }
121 }
122
123 // Handle TLS Local-Dynamic relocation. Returns true if the __tls_get_addr
124 // call should be skipped (i.e., caller should ++it).
125 bool handleTlsLd(RelExpr sharedExpr, RelType type, uint64_t offset,
126 int64_t addend, Symbol &sym) {
127 if (ctx.arg.shared) {
128 ctx.needsTlsLd.store(i: true, m: std::memory_order_relaxed);
129 sec->addReloc(r: {.expr: sharedExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym});
130 return false;
131 }
132 // Optimize to Local Exec.
133 sec->addReloc(r: {.expr: R_TPREL, .type: type, .offset: offset, .addend: addend, .sym: &sym});
134 return true;
135 }
136
137 // Handle TLS General-Dynamic relocation. Returns true if the __tls_get_addr
138 // call should be skipped (i.e., caller should ++it). Pass R_NONE for
139 // ieExpr/leExpr to disable GD-to-IE/LE optimization (e.g. ARM, RISC-V).
140 bool handleTlsGd(RelExpr sharedExpr, RelExpr ieExpr, RelExpr leExpr,
141 RelType type, uint64_t offset, int64_t addend, Symbol &sym) {
142 if (!ctx.arg.shared && ieExpr != R_NONE) {
143 if (sym.isPreemptible) {
144 // Optimize to Initial Exec.
145 sym.setFlags(NEEDS_TLSIE);
146 sec->addReloc(r: {.expr: ieExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym});
147 } else {
148 // Optimize to Local Exec.
149 sec->addReloc(r: {.expr: leExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym});
150 }
151 return true;
152 }
153 sym.setFlags(NEEDS_TLSGD);
154 sec->addReloc(r: {.expr: sharedExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym});
155 return false;
156 }
157
158 // Handle TLSDESC relocation.
159 void handleTlsDesc(RelExpr sharedExpr, RelExpr ieExpr, RelType type,
160 uint64_t offset, int64_t addend, Symbol &sym) {
161 if (ctx.arg.shared) {
162 // NEEDS_TLSDESC_NONAUTH is a no-op for non-AArch64 targets and detects
163 // incompatibility with NEEDS_TLSDESC_AUTH.
164 sym.setFlags(NEEDS_TLSDESC | NEEDS_TLSDESC_NONAUTH);
165 sec->addReloc(r: {.expr: sharedExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym});
166 } else if (sym.isPreemptible) {
167 // Optimize to Initial Exec.
168 sym.setFlags(NEEDS_TLSIE);
169 sec->addReloc(r: {.expr: ieExpr, .type: type, .offset: offset, .addend: addend, .sym: &sym});
170 } else {
171 // Optimize to Local Exec.
172 sec->addReloc(r: {.expr: R_TPREL, .type: type, .offset: offset, .addend: addend, .sym: &sym});
173 }
174 }
175};
176
177template <class ELFT, class RelTy>
178int64_t RelocScan::getAddend(const RelTy &r, RelType type) {
179 return RelTy::HasAddend ? elf::getAddend<ELFT>(r)
180 : ctx.target->getImplicitAddend(
181 buf: sec->content().data() + r.r_offset, type);
182}
183
184template <class ELFT, class RelTy>
185void RelocScan::scan(typename Relocs<RelTy>::const_iterator &it, RelType type,
186 int64_t addend) {
187 const RelTy &rel = *it;
188 uint32_t symIdx = rel.getSymbol(false);
189 Symbol &sym = sec->getFile<ELFT>()->getSymbol(symIdx);
190 uint64_t offset = rel.r_offset;
191 RelExpr expr =
192 ctx.target->getRelExpr(type, s: sym, loc: sec->content().data() + offset);
193
194 // Ignore R_*_NONE and other marker relocations.
195 if (expr == R_NONE)
196 return;
197
198 // Error if the target symbol is undefined. Symbol index 0 may be used by
199 // marker relocations, e.g. R_*_NONE and R_ARM_V4BX. Don't error on them.
200 if (sym.isUndefined() && symIdx != 0 &&
201 maybeReportUndefined(sym&: cast<Undefined>(Val&: sym), offset))
202 return;
203
204 // Ensure GOT or GOTPLT is created for relocations that reference their base
205 // addresses without directly creating entries.
206 if (oneof<R_GOTPLTREL, R_GOTPLT, R_TLSGD_GOTPLT>(expr)) {
207 ctx.in.gotPlt->hasGotPltOffRel.store(i: true, m: std::memory_order_relaxed);
208 } else if (oneof<R_GOTONLY_PC, R_GOTREL, RE_PPC32_PLTREL>(expr)) {
209 ctx.in.got->hasGotOffRel.store(i: true, m: std::memory_order_relaxed);
210 }
211
212 process(expr, type, offset, sym, addend);
213}
214
215// Dispatch to target-specific scanSectionImpl based on relocation format.
216template <class Target, class ELFT>
217void scanSection1(Target &target, InputSectionBase &sec) {
218 const RelsOrRelas<ELFT> rels = sec.template relsOrRelas<ELFT>();
219 if (rels.areRelocsCrel())
220 target.template scanSectionImpl<ELFT>(sec, rels.crels);
221 else if (rels.areRelocsRel())
222 target.template scanSectionImpl<ELFT>(sec, rels.rels);
223 else
224 target.template scanSectionImpl<ELFT>(sec, rels.relas);
225}
226
} // namespace lld::elf

#endif