/src/llvm-project/llvm/lib/Target/AArch64/MCTargetDesc/AArch64ELFObjectWriter.cpp
//===-- AArch64ELFObjectWriter.cpp - AArch64 ELF Writer -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file handles ELF-specific object emission, converting LLVM's internal
// fixups into the appropriate relocations.
//
//===----------------------------------------------------------------------===//

#include "MCTargetDesc/AArch64FixupKinds.h"
#include "MCTargetDesc/AArch64MCExpr.h"
#include "MCTargetDesc/AArch64MCTargetDesc.h"
#include "llvm/BinaryFormat/ELF.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCELFObjectWriter.h"
#include "llvm/MC/MCFixup.h"
#include "llvm/MC/MCObjectWriter.h"
#include "llvm/MC/MCValue.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>
#include <cstdint>

using namespace llvm;

namespace {

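// Writes AArch64 ELF relocations, selecting between the LP64 (R_AARCH64_*)
// and ILP32 (R_AARCH64_P32_*) relocation sets depending on the target ABI.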
class AArch64ELFObjectWriter : public MCELFObjectTargetWriter {
public:
  AArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32);

  ~AArch64ELFObjectWriter() override = default;

  MCSectionELF *getMemtagRelocsSection(MCContext &Ctx) const override;

protected:
  unsigned getRelocType(MCContext &Ctx, const MCValue &Target,
                        const MCFixup &Fixup, bool IsPCRel) const override;
  bool needsRelocateWithSymbol(const MCValue &Val, const MCSymbol &Sym,
                               unsigned Type) const override;
  bool IsILP32;
};

} // end anonymous namespace

AArch64ELFObjectWriter::AArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32)
    : MCELFObjectTargetWriter(/*Is64Bit*/ !IsILP32, OSABI, ELF::EM_AARCH64,
                              /*HasRelocationAddend*/ true),
      IsILP32(IsILP32) {}

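// R_CLS(rtype) picks the ILP32 (R_AARCH64_P32_*) or LP64 (R_AARCH64_*)
// spelling of a relocation that exists in both ABIs; BAD_ILP32_MOV builds the
// diagnostic for absolute MOVW relocations that have no ILP32 equivalent.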
#define R_CLS(rtype)                                                           \
  IsILP32 ? ELF::R_AARCH64_P32_##rtype : ELF::R_AARCH64_##rtype
#define BAD_ILP32_MOV(lp64rtype)                                               \
  "ILP32 absolute MOV relocation not "                                         \
  "supported (LP64 eqv: " #lp64rtype ")"

// Assumes IsILP32 is true. For movw fixups, report an error and return true
// when the requested relocation exists only in the LP64 ABI.
static bool isNonILP32reloc(const MCFixup &Fixup,
                            AArch64MCExpr::VariantKind RefKind,
                            MCContext &Ctx) {
  if (Fixup.getTargetKind() != AArch64::fixup_aarch64_movw)
    return false;
  switch (RefKind) {
  case AArch64MCExpr::VK_ABS_G3:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G3));
    return true;
  case AArch64MCExpr::VK_ABS_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2));
    return true;
  case AArch64MCExpr::VK_ABS_G2_S:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G2));
    return true;
  case AArch64MCExpr::VK_ABS_G2_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2_NC));
    return true;
  case AArch64MCExpr::VK_ABS_G1_S:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G1));
    return true;
  case AArch64MCExpr::VK_ABS_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G1_NC));
    return true;
  case AArch64MCExpr::VK_DTPREL_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G2));
    return true;
  case AArch64MCExpr::VK_DTPREL_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G1_NC));
    return true;
  case AArch64MCExpr::VK_TPREL_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G2));
    return true;
  case AArch64MCExpr::VK_TPREL_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G1_NC));
    return true;
  case AArch64MCExpr::VK_GOTTPREL_G1:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G1));
    return true;
  case AArch64MCExpr::VK_GOTTPREL_G0_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G0_NC));
    return true;
  default:
    return false;
  }
  return false;
}

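// Translate an AArch64 fixup plus its expression variant into an ELF
// relocation type. Unsupported combinations are diagnosed through the context
// and mapped to R_AARCH64_NONE.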
unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx,
                                              const MCValue &Target,
                                              const MCFixup &Fixup,
                                              bool IsPCRel) const {
  unsigned Kind = Fixup.getTargetKind();
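  // Kinds at or above FirstLiteralRelocationKind encode a raw ELF relocation
  // number (as produced by a .reloc directive); emit that number directly.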
  if (Kind >= FirstLiteralRelocationKind)
    return Kind - FirstLiteralRelocationKind;
  AArch64MCExpr::VariantKind RefKind =
      static_cast<AArch64MCExpr::VariantKind>(Target.getRefKind());
  AArch64MCExpr::VariantKind SymLoc = AArch64MCExpr::getSymbolLoc(RefKind);
  bool IsNC = AArch64MCExpr::isNotChecked(RefKind);

  assert((!Target.getSymA() ||
          Target.getSymA()->getKind() == MCSymbolRefExpr::VK_None ||
          Target.getSymA()->getKind() == MCSymbolRefExpr::VK_PLT ||
          Target.getSymA()->getKind() == MCSymbolRefExpr::VK_GOTPCREL) &&
         "Should only be expression-level modifiers here");

  assert((!Target.getSymB() ||
          Target.getSymB()->getKind() == MCSymbolRefExpr::VK_None) &&
         "Should only be expression-level modifiers here");

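  // PC-relative fixups: branches, ADR/ADRP, and PC-relative data.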
  if (IsPCRel) {
    switch (Kind) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(PREL16);
    case FK_Data_4: {
      return Target.getAccessVariant() == MCSymbolRefExpr::VK_PLT
                 ? R_CLS(PLT32)
                 : R_CLS(PREL32);
    }
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(),
                        "ILP32 8 byte PC relative data "
                        "relocation not supported (LP64 eqv: PREL64)");
        return ELF::R_AARCH64_NONE;
      } else
        return ELF::R_AARCH64_PREL64;
    case AArch64::fixup_aarch64_pcrel_adr_imm21:
      if (SymLoc != AArch64MCExpr::VK_ABS)
        Ctx.reportError(Fixup.getLoc(),
                        "invalid symbol kind for ADR relocation");
      return R_CLS(ADR_PREL_LO21);
    case AArch64::fixup_aarch64_pcrel_adrp_imm21:
      if (SymLoc == AArch64MCExpr::VK_ABS && !IsNC)
        return R_CLS(ADR_PREL_PG_HI21);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) {
        if (IsILP32) {
          Ctx.reportError(Fixup.getLoc(),
                          "invalid fixup for 32-bit pcrel ADRP instruction "
                          "VK_ABS VK_NC");
          return ELF::R_AARCH64_NONE;
        } else {
          return ELF::R_AARCH64_ADR_PREL_PG_HI21_NC;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC)
        return R_CLS(ADR_GOT_PAGE);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && !IsNC)
        return R_CLS(TLSIE_ADR_GOTTPREL_PAGE21);
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC)
        return R_CLS(TLSDESC_ADR_PAGE21);
      Ctx.reportError(Fixup.getLoc(),
                      "invalid symbol kind for ADRP relocation");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_pcrel_branch26:
      return R_CLS(JUMP26);
    case AArch64::fixup_aarch64_pcrel_call26:
      return R_CLS(CALL26);
    case AArch64::fixup_aarch64_ldr_pcrel_imm19:
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL)
        return R_CLS(TLSIE_LD_GOTTPREL_PREL19);
      if (SymLoc == AArch64MCExpr::VK_GOT)
        return R_CLS(GOT_LD_PREL19);
      return R_CLS(LD_PREL_LO19);
    case AArch64::fixup_aarch64_pcrel_branch14:
      return R_CLS(TSTBR14);
    case AArch64::fixup_aarch64_pcrel_branch16:
      Ctx.reportError(Fixup.getLoc(),
                      "relocation of PAC/AUT instructions is not supported");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_pcrel_branch19:
      return R_CLS(CONDBR19);
    default:
      Ctx.reportError(Fixup.getLoc(), "Unsupported pc-relative fixup kind");
      return ELF::R_AARCH64_NONE;
    }
  } else {
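    // Absolute (non-PC-relative) data and instruction fixups.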
    if (IsILP32 && isNonILP32reloc(Fixup, RefKind, Ctx))
      return ELF::R_AARCH64_NONE;
    switch (Fixup.getTargetKind()) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(ABS16);
    case FK_Data_4:
      return (!IsILP32 &&
              Target.getAccessVariant() == MCSymbolRefExpr::VK_GOTPCREL)
                 ? ELF::R_AARCH64_GOTPCREL32
                 : R_CLS(ABS32);
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(),
                        "ILP32 8 byte absolute data "
                        "relocation not supported (LP64 eqv: ABS64)");
        return ELF::R_AARCH64_NONE;
      } else {
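        // Pointer-authenticated (signed) absolute pointers use the AUTH
        // variant of ABS64.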
        if (RefKind == AArch64MCExpr::VK_AUTH ||
            RefKind == AArch64MCExpr::VK_AUTHADDR)
          return ELF::R_AARCH64_AUTH_ABS64;
        return ELF::R_AARCH64_ABS64;
      }
    case AArch64::fixup_aarch64_add_imm12:
      if (RefKind == AArch64MCExpr::VK_DTPREL_HI12)
        return R_CLS(TLSLD_ADD_DTPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_TPREL_HI12)
        return R_CLS(TLSLE_ADD_TPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12_NC)
        return R_CLS(TLSLD_ADD_DTPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12)
        return R_CLS(TLSLD_ADD_DTPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12_NC)
        return R_CLS(TLSLE_ADD_TPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12)
        return R_CLS(TLSLE_ADD_TPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TLSDESC_LO12)
        return R_CLS(TLSDESC_ADD_LO12);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(ADD_ABS_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for add (uimm12) instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale1:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST8_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 8-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale2:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST16_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 16-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale4:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST32_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_LD32_GOT_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte unchecked GOT load/store relocation "
294 | 0 | "not supported (ILP32 eqv: LD32_GOT_LO12_NC"); |
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC) {
        if (IsILP32) {
          Ctx.reportError(Fixup.getLoc(),
                          "ILP32 4 byte checked GOT load/store relocation "
                          "not supported (unchecked eqv: LD32_GOT_LO12_NC)");
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte checked GOT load/store relocation "
                          "not supported (unchecked/ILP32 eqv: "
                          "LD32_GOT_LO12_NC)");
        }
        return ELF::R_AARCH64_NONE;
      }
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 32-bit load/store "
                          "relocation not supported (ILP32 eqv: "
                          "TLSIE_LD32_GOTTPREL_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_TLSDESC_LD32_LO12;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte TLSDESC load/store relocation "
                          "not supported (ILP32 eqv: TLSDESC_LD64_LO12)");
          return ELF::R_AARCH64_NONE;
        }
      }

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 32-bit load/store instruction "
                      "fixup_aarch64_ldst_imm12_scale4");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale8:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST64_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
        AArch64MCExpr::VariantKind AddressLoc =
            AArch64MCExpr::getAddressFrag(RefKind);
        if (!IsILP32) {
          if (AddressLoc == AArch64MCExpr::VK_LO15)
            return ELF::R_AARCH64_LD64_GOTPAGE_LO15;
          return ELF::R_AARCH64_LD64_GOT_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "ILP32 64-bit load/store "
                          "relocation not supported (LP64 eqv: "
                          "LD64_GOT_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "ILP32 64-bit load/store "
                          "relocation not supported (LP64 eqv: "
                          "TLSIE_LD64_GOTTPREL_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_TLSDESC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_TLSDESC_LD64_LO12;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "ILP32 64-bit load/store "
                          "relocation not supported (LP64 eqv: "
                          "TLSDESC_LD64_LO12)");
          return ELF::R_AARCH64_NONE;
        }
      }
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 64-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale16:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST128_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST128_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST128_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST128_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST128_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 128-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    // The ILP32-invalid MOVW variants are never reached here; they are
    // rejected earlier by isNonILP32reloc.
    case AArch64::fixup_aarch64_movw:
      if (RefKind == AArch64MCExpr::VK_ABS_G3)
        return ELF::R_AARCH64_MOVW_UABS_G3;
      if (RefKind == AArch64MCExpr::VK_ABS_G2)
        return ELF::R_AARCH64_MOVW_UABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_S)
        return ELF::R_AARCH64_MOVW_SABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_NC)
        return ELF::R_AARCH64_MOVW_UABS_G2_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G1)
        return R_CLS(MOVW_UABS_G1);
      if (RefKind == AArch64MCExpr::VK_ABS_G1_S)
        return ELF::R_AARCH64_MOVW_SABS_G1;
      if (RefKind == AArch64MCExpr::VK_ABS_G1_NC)
        return ELF::R_AARCH64_MOVW_UABS_G1_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G0)
        return R_CLS(MOVW_UABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_S)
        return R_CLS(MOVW_SABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_NC)
        return R_CLS(MOVW_UABS_G0_NC);
      if (RefKind == AArch64MCExpr::VK_PREL_G3)
        return ELF::R_AARCH64_MOVW_PREL_G3;
      if (RefKind == AArch64MCExpr::VK_PREL_G2)
        return ELF::R_AARCH64_MOVW_PREL_G2;
      if (RefKind == AArch64MCExpr::VK_PREL_G2_NC)
        return ELF::R_AARCH64_MOVW_PREL_G2_NC;
      if (RefKind == AArch64MCExpr::VK_PREL_G1)
        return R_CLS(MOVW_PREL_G1);
      if (RefKind == AArch64MCExpr::VK_PREL_G1_NC)
        return ELF::R_AARCH64_MOVW_PREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_PREL_G0)
        return R_CLS(MOVW_PREL_G0);
      if (RefKind == AArch64MCExpr::VK_PREL_G0_NC)
        return R_CLS(MOVW_PREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G2)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G2;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1)
        return R_CLS(TLSLD_MOVW_DTPREL_G1);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1_NC)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0)
        return R_CLS(TLSLD_MOVW_DTPREL_G0);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0_NC)
        return R_CLS(TLSLD_MOVW_DTPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_G2)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G2;
      if (RefKind == AArch64MCExpr::VK_TPREL_G1)
        return R_CLS(TLSLE_MOVW_TPREL_G1);
      if (RefKind == AArch64MCExpr::VK_TPREL_G1_NC)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_TPREL_G0)
        return R_CLS(TLSLE_MOVW_TPREL_G0);
      if (RefKind == AArch64MCExpr::VK_TPREL_G0_NC)
        return R_CLS(TLSLE_MOVW_TPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G1)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G1;
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G0_NC)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC;
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for movz/movk instruction");
      return ELF::R_AARCH64_NONE;
    default:
      Ctx.reportError(Fixup.getLoc(), "Unknown ELF relocation type");
      return ELF::R_AARCH64_NONE;
    }
  }

  llvm_unreachable("Unimplemented fixup -> relocation");
}

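// GOT-relative relocations must be recorded against the symbol itself rather
// than the section symbol, so the linker creates the GOT entry for the right
// symbol.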
bool AArch64ELFObjectWriter::needsRelocateWithSymbol(const MCValue &Val,
                                                     const MCSymbol &,
                                                     unsigned) const {
  return (Val.getRefKind() & AArch64MCExpr::VK_GOT) == AArch64MCExpr::VK_GOT;
}

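// Section used to record memory-tagged (MTE) globals for static linking.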
MCSectionELF *
AArch64ELFObjectWriter::getMemtagRelocsSection(MCContext &Ctx) const {
  return Ctx.getELFSection(".memtag.globals.static",
                           ELF::SHT_AARCH64_MEMTAG_GLOBALS_STATIC, 0);
}

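// Factory used by the AArch64 MC layer to create this ELF object writer.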
std::unique_ptr<MCObjectTargetWriter>
llvm::createAArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32) {
  return std::make_unique<AArch64ELFObjectWriter>(OSABI, IsILP32);
}