// AArch64ELFObjectWriter.cpp — reconstructed from an llvm-cov source listing.
// Original path: llvm/lib/Target/AArch64/MCTargetDesc/AArch64ELFObjectWriter.cpp
//===-- AArch64ELFObjectWriter.cpp - AArch64 ELF Writer -------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file handles ELF-specific object emission, converting LLVM's internal
// fixups into the appropriate relocations.
//
//===----------------------------------------------------------------------===//

#include "MCTargetDesc/AArch64FixupKinds.h"
#include "MCTargetDesc/AArch64MCExpr.h"
#include "MCTargetDesc/AArch64MCTargetDesc.h"
#include "llvm/BinaryFormat/ELF.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCELFObjectWriter.h"
#include "llvm/MC/MCFixup.h"
#include "llvm/MC/MCValue.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>
#include <cstdint>

27 | | using namespace llvm; |
28 | | |
29 | | namespace { |
30 | | |
31 | | class AArch64ELFObjectWriter : public MCELFObjectTargetWriter { |
32 | | public: |
33 | | AArch64ELFObjectWriter(uint8_t OSABI, bool IsLittleEndian, bool IsILP32); |
34 | | |
35 | 159 | ~AArch64ELFObjectWriter() override = default; |
36 | | |
37 | | protected: |
38 | | unsigned getRelocType(MCContext &Ctx, const MCValue &Target, |
39 | | const MCFixup &Fixup, bool IsPCRel) const override; |
40 | | bool IsILP32; |
41 | | }; |
42 | | |
43 | | } // end anonymous namespace |
44 | | |
45 | | AArch64ELFObjectWriter::AArch64ELFObjectWriter(uint8_t OSABI, |
46 | | bool IsLittleEndian, |
47 | | bool IsILP32) |
48 | | : MCELFObjectTargetWriter(/*Is64Bit*/ true, OSABI, ELF::EM_AARCH64, |
49 | | /*HasRelocationAddend*/ true), |
50 | 159 | IsILP32(IsILP32) {} |
51 | | |
52 | | #define R_CLS(rtype) \ |
53 | 642 | IsILP32 ? 630 ELF::R_AARCH64_P32_81 ##rtype : ELF::R_AARCH64_561 ##rtype |
54 | | #define BAD_ILP32_MOV(lp64rtype) \ |
55 | 12 | "ILP32 absolute MOV relocation not " \ |
56 | 12 | "supported (LP64 eqv: " #lp64rtype ")" |
57 | | |
58 | | // assumes IsILP32 is true |
59 | | static bool isNonILP32reloc(const MCFixup &Fixup, |
60 | | AArch64MCExpr::VariantKind RefKind, |
61 | 85 | MCContext &Ctx) { |
62 | 85 | if ((unsigned)Fixup.getKind() != AArch64::fixup_aarch64_movw) |
63 | 73 | return false; |
64 | 12 | switch (RefKind) { |
65 | 1 | case AArch64MCExpr::VK_ABS_G3: |
66 | 1 | Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G3)); |
67 | 1 | return true; |
68 | 1 | case AArch64MCExpr::VK_ABS_G2: |
69 | 1 | Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2)); |
70 | 1 | return true; |
71 | 1 | case AArch64MCExpr::VK_ABS_G2_S: |
72 | 1 | Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G2)); |
73 | 1 | return true; |
74 | 1 | case AArch64MCExpr::VK_ABS_G2_NC: |
75 | 1 | Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2_NC)); |
76 | 1 | return true; |
77 | 1 | case AArch64MCExpr::VK_ABS_G1_S: |
78 | 1 | Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G1)); |
79 | 1 | return true; |
80 | 1 | case AArch64MCExpr::VK_ABS_G1_NC: |
81 | 1 | Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G1_NC)); |
82 | 1 | return true; |
83 | 1 | case AArch64MCExpr::VK_DTPREL_G2: |
84 | 1 | Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G2)); |
85 | 1 | return true; |
86 | 1 | case AArch64MCExpr::VK_DTPREL_G1_NC: |
87 | 1 | Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G1_NC)); |
88 | 1 | return true; |
89 | 1 | case AArch64MCExpr::VK_TPREL_G2: |
90 | 1 | Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G2)); |
91 | 1 | return true; |
92 | 1 | case AArch64MCExpr::VK_TPREL_G1_NC: |
93 | 1 | Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G1_NC)); |
94 | 1 | return true; |
95 | 1 | case AArch64MCExpr::VK_GOTTPREL_G1: |
96 | 1 | Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G1)); |
97 | 1 | return true; |
98 | 1 | case AArch64MCExpr::VK_GOTTPREL_G0_NC: |
99 | 1 | Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G0_NC)); |
100 | 1 | return true; |
101 | 0 | default: |
102 | 0 | return false; |
103 | 0 | } |
104 | 0 | return false; |
105 | 0 | } |
106 | | |
107 | | unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx, |
108 | | const MCValue &Target, |
109 | | const MCFixup &Fixup, |
110 | 817 | bool IsPCRel) const { |
111 | 817 | AArch64MCExpr::VariantKind RefKind = |
112 | 817 | static_cast<AArch64MCExpr::VariantKind>(Target.getRefKind()); |
113 | 817 | AArch64MCExpr::VariantKind SymLoc = AArch64MCExpr::getSymbolLoc(RefKind); |
114 | 817 | bool IsNC = AArch64MCExpr::isNotChecked(RefKind); |
115 | 817 | |
116 | 817 | assert((!Target.getSymA() || |
117 | 817 | Target.getSymA()->getKind() == MCSymbolRefExpr::VK_None) && |
118 | 817 | "Should only be expression-level modifiers here"); |
119 | 817 | |
120 | 817 | assert((!Target.getSymB() || |
121 | 817 | Target.getSymB()->getKind() == MCSymbolRefExpr::VK_None) && |
122 | 817 | "Should only be expression-level modifiers here"); |
123 | 817 | |
124 | 817 | if (IsPCRel817 ) { |
125 | 234 | switch ((unsigned)Fixup.getKind()) { |
126 | 1 | case FK_Data_1: |
127 | 1 | Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported"); |
128 | 1 | return ELF::R_AARCH64_NONE; |
129 | 6 | case FK_Data_2: |
130 | 6 | return R_CLS(PREL16); |
131 | 46 | case FK_Data_4: |
132 | 46 | return R_CLS(PREL32); |
133 | 8 | case FK_Data_8: |
134 | 8 | if (IsILP328 ) { |
135 | 1 | Ctx.reportError(Fixup.getLoc(), |
136 | 1 | "ILP32 8 byte PC relative data " |
137 | 1 | "relocation not supported (LP64 eqv: PREL64)"); |
138 | 1 | return ELF::R_AARCH64_NONE; |
139 | 1 | } else |
140 | 7 | return ELF::R_AARCH64_PREL64; |
141 | 6 | case AArch64::fixup_aarch64_pcrel_adr_imm21: |
142 | 6 | assert(SymLoc == AArch64MCExpr::VK_NONE && "unexpected ADR relocation"); |
143 | 6 | return R_CLS(ADR_PREL_LO21); |
144 | 92 | case AArch64::fixup_aarch64_pcrel_adrp_imm21: |
145 | 92 | if (SymLoc == AArch64MCExpr::VK_ABS && 92 !IsNC35 ) |
146 | 35 | return 35 R_CLS35 (ADR_PREL_PG_HI21); |
147 | 57 | if (57 SymLoc == AArch64MCExpr::VK_ABS && 57 IsNC0 ) { |
148 | 0 | if (IsILP320 ) { |
149 | 0 | Ctx.reportError(Fixup.getLoc(), |
150 | 0 | "invalid fixup for 32-bit pcrel ADRP instruction " |
151 | 0 | "VK_ABS VK_NC"); |
152 | 0 | return ELF::R_AARCH64_NONE; |
153 | 0 | } else { |
154 | 0 | return ELF::R_AARCH64_ADR_PREL_PG_HI21_NC; |
155 | 0 | } |
156 | 57 | } |
157 | 57 | if (57 SymLoc == AArch64MCExpr::VK_GOT && 57 !IsNC18 ) |
158 | 18 | return 18 R_CLS18 (ADR_GOT_PAGE); |
159 | 39 | if (39 SymLoc == AArch64MCExpr::VK_GOTTPREL && 39 !IsNC15 ) |
160 | 15 | return 15 R_CLS15 (TLSIE_ADR_GOTTPREL_PAGE21); |
161 | 24 | if (24 SymLoc == AArch64MCExpr::VK_TLSDESC && 24 !IsNC24 ) |
162 | 24 | return 24 R_CLS24 (TLSDESC_ADR_PAGE21); |
163 | 0 | Ctx.reportError(Fixup.getLoc(), |
164 | 0 | "invalid symbol kind for ADRP relocation"); |
165 | 0 | return ELF::R_AARCH64_NONE; |
166 | 12 | case AArch64::fixup_aarch64_pcrel_branch26: |
167 | 12 | return R_CLS(JUMP26); |
168 | 28 | case AArch64::fixup_aarch64_pcrel_call26: |
169 | 28 | return R_CLS(CALL26); |
170 | 14 | case AArch64::fixup_aarch64_ldr_pcrel_imm19: |
171 | 14 | if (SymLoc == AArch64MCExpr::VK_GOTTPREL) |
172 | 2 | return 2 R_CLS2 (TLSIE_LD_GOTTPREL_PREL19); |
173 | 12 | return 12 R_CLS12 (LD_PREL_LO19); |
174 | 8 | case AArch64::fixup_aarch64_pcrel_branch14: |
175 | 8 | return R_CLS(TSTBR14); |
176 | 8 | case AArch64::fixup_aarch64_pcrel_branch19: |
177 | 8 | return R_CLS(CONDBR19); |
178 | 5 | default: |
179 | 5 | Ctx.reportError(Fixup.getLoc(), "Unsupported pc-relative fixup kind"); |
180 | 5 | return ELF::R_AARCH64_NONE; |
181 | 817 | } |
182 | 583 | } else { |
183 | 583 | if (IsILP32 && 583 isNonILP32reloc(Fixup, RefKind, Ctx)85 ) |
184 | 12 | return ELF::R_AARCH64_NONE; |
185 | 571 | switch ((unsigned)Fixup.getKind()) { |
186 | 1 | case FK_Data_1: |
187 | 1 | Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported"); |
188 | 1 | return ELF::R_AARCH64_NONE; |
189 | 6 | case FK_Data_2: |
190 | 6 | return R_CLS(ABS16); |
191 | 72 | case FK_Data_4: |
192 | 72 | return R_CLS(ABS32); |
193 | 37 | case FK_Data_8: |
194 | 37 | if (IsILP3237 ) { |
195 | 1 | Ctx.reportError(Fixup.getLoc(), |
196 | 1 | "ILP32 8 byte absolute data " |
197 | 1 | "relocation not supported (LP64 eqv: ABS64)"); |
198 | 1 | return ELF::R_AARCH64_NONE; |
199 | 1 | } else |
200 | 36 | return ELF::R_AARCH64_ABS64; |
201 | 101 | case AArch64::fixup_aarch64_add_imm12: |
202 | 101 | if (RefKind == AArch64MCExpr::VK_DTPREL_HI12) |
203 | 8 | return 8 R_CLS8 (TLSLD_ADD_DTPREL_HI12); |
204 | 93 | if (93 RefKind == AArch64MCExpr::VK_TPREL_HI1293 ) |
205 | 8 | return 8 R_CLS8 (TLSLE_ADD_TPREL_HI12); |
206 | 85 | if (85 RefKind == AArch64MCExpr::VK_DTPREL_LO12_NC85 ) |
207 | 11 | return 11 R_CLS11 (TLSLD_ADD_DTPREL_LO12_NC); |
208 | 74 | if (74 RefKind == AArch64MCExpr::VK_DTPREL_LO1274 ) |
209 | 8 | return 8 R_CLS8 (TLSLD_ADD_DTPREL_LO12); |
210 | 66 | if (66 RefKind == AArch64MCExpr::VK_TPREL_LO12_NC66 ) |
211 | 11 | return 11 R_CLS11 (TLSLE_ADD_TPREL_LO12_NC); |
212 | 55 | if (55 RefKind == AArch64MCExpr::VK_TPREL_LO1255 ) |
213 | 8 | return 8 R_CLS8 (TLSLE_ADD_TPREL_LO12); |
214 | 47 | if (47 RefKind == AArch64MCExpr::VK_TLSDESC_LO1247 ) |
215 | 24 | return 24 R_CLS24 (TLSDESC_ADD_LO12); |
216 | 23 | if (23 SymLoc == AArch64MCExpr::VK_ABS && 23 IsNC23 ) |
217 | 23 | return 23 R_CLS23 (ADD_ABS_LO12_NC); |
218 | 0 |
|
219 | 0 | Ctx.reportError(Fixup.getLoc(), |
220 | 0 | "invalid fixup for add (uimm12) instruction"); |
221 | 0 | return ELF::R_AARCH64_NONE; |
222 | 43 | case AArch64::fixup_aarch64_ldst_imm12_scale1: |
223 | 43 | if (SymLoc == AArch64MCExpr::VK_ABS && 43 IsNC18 ) |
224 | 18 | return 18 R_CLS18 (LDST8_ABS_LO12_NC); |
225 | 25 | if (25 SymLoc == AArch64MCExpr::VK_DTPREL && 25 !IsNC12 ) |
226 | 6 | return 6 R_CLS6 (TLSLD_LDST8_DTPREL_LO12); |
227 | 19 | if (19 SymLoc == AArch64MCExpr::VK_DTPREL && 19 IsNC6 ) |
228 | 6 | return 6 R_CLS6 (TLSLD_LDST8_DTPREL_LO12_NC); |
229 | 13 | if (13 SymLoc == AArch64MCExpr::VK_TPREL && 13 !IsNC12 ) |
230 | 6 | return 6 R_CLS6 (TLSLE_LDST8_TPREL_LO12); |
231 | 7 | if (7 SymLoc == AArch64MCExpr::VK_TPREL && 7 IsNC6 ) |
232 | 6 | return 6 R_CLS6 (TLSLE_LDST8_TPREL_LO12_NC); |
233 | 1 | |
234 | 1 | Ctx.reportError(Fixup.getLoc(), |
235 | 1 | "invalid fixup for 8-bit load/store instruction"); |
236 | 1 | return ELF::R_AARCH64_NONE; |
237 | 45 | case AArch64::fixup_aarch64_ldst_imm12_scale2: |
238 | 45 | if (SymLoc == AArch64MCExpr::VK_ABS && 45 IsNC20 ) |
239 | 20 | return 20 R_CLS20 (LDST16_ABS_LO12_NC); |
240 | 25 | if (25 SymLoc == AArch64MCExpr::VK_DTPREL && 25 !IsNC12 ) |
241 | 6 | return 6 R_CLS6 (TLSLD_LDST16_DTPREL_LO12); |
242 | 19 | if (19 SymLoc == AArch64MCExpr::VK_DTPREL && 19 IsNC6 ) |
243 | 6 | return 6 R_CLS6 (TLSLD_LDST16_DTPREL_LO12_NC); |
244 | 13 | if (13 SymLoc == AArch64MCExpr::VK_TPREL && 13 !IsNC12 ) |
245 | 6 | return 6 R_CLS6 (TLSLE_LDST16_TPREL_LO12); |
246 | 7 | if (7 SymLoc == AArch64MCExpr::VK_TPREL && 7 IsNC6 ) |
247 | 6 | return 6 R_CLS6 (TLSLE_LDST16_TPREL_LO12_NC); |
248 | 1 | |
249 | 1 | Ctx.reportError(Fixup.getLoc(), |
250 | 1 | "invalid fixup for 16-bit load/store instruction"); |
251 | 1 | return ELF::R_AARCH64_NONE; |
252 | 45 | case AArch64::fixup_aarch64_ldst_imm12_scale4: |
253 | 45 | if (SymLoc == AArch64MCExpr::VK_ABS && 45 IsNC18 ) |
254 | 18 | return 18 R_CLS18 (LDST32_ABS_LO12_NC); |
255 | 27 | if (27 SymLoc == AArch64MCExpr::VK_DTPREL && 27 !IsNC10 ) |
256 | 4 | return 4 R_CLS4 (TLSLD_LDST32_DTPREL_LO12); |
257 | 23 | if (23 SymLoc == AArch64MCExpr::VK_DTPREL && 23 IsNC6 ) |
258 | 6 | return 6 R_CLS6 (TLSLD_LDST32_DTPREL_LO12_NC); |
259 | 17 | if (17 SymLoc == AArch64MCExpr::VK_TPREL && 17 !IsNC10 ) |
260 | 4 | return 4 R_CLS4 (TLSLE_LDST32_TPREL_LO12); |
261 | 13 | if (13 SymLoc == AArch64MCExpr::VK_TPREL && 13 IsNC6 ) |
262 | 6 | return 6 R_CLS6 (TLSLE_LDST32_TPREL_LO12_NC); |
263 | 7 | if (7 SymLoc == AArch64MCExpr::VK_GOT && 7 IsNC2 ) { |
264 | 2 | if (IsILP322 ) { |
265 | 2 | return ELF::R_AARCH64_P32_LD32_GOT_LO12_NC; |
266 | 0 | } else { |
267 | 0 | Ctx.reportError(Fixup.getLoc(), |
268 | 0 | "LP64 4 byte unchecked GOT load/store relocation " |
269 | 0 | "not supported (ILP32 eqv: LD32_GOT_LO12_NC"); |
270 | 0 | return ELF::R_AARCH64_NONE; |
271 | 0 | } |
272 | 5 | } |
273 | 5 | if (5 SymLoc == AArch64MCExpr::VK_GOT && 5 !IsNC0 ) { |
274 | 0 | if (IsILP320 ) { |
275 | 0 | Ctx.reportError(Fixup.getLoc(), |
276 | 0 | "ILP32 4 byte checked GOT load/store relocation " |
277 | 0 | "not supported (unchecked eqv: LD32_GOT_LO12_NC)"); |
278 | 0 | } else { |
279 | 0 | Ctx.reportError(Fixup.getLoc(), |
280 | 0 | "LP64 4 byte checked GOT load/store relocation " |
281 | 0 | "not supported (unchecked/ILP32 eqv: " |
282 | 0 | "LD32_GOT_LO12_NC)"); |
283 | 0 | } |
284 | 0 | return ELF::R_AARCH64_NONE; |
285 | 0 | } |
286 | 5 | if (5 SymLoc == AArch64MCExpr::VK_GOTTPREL && 5 IsNC3 ) { |
287 | 3 | if (IsILP323 ) { |
288 | 2 | return ELF::R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC; |
289 | 0 | } else { |
290 | 1 | Ctx.reportError(Fixup.getLoc(), |
291 | 1 | "LP64 32-bit load/store " |
292 | 1 | "relocation not supported (ILP32 eqv: " |
293 | 1 | "TLSIE_LD32_GOTTPREL_LO12_NC)"); |
294 | 1 | return ELF::R_AARCH64_NONE; |
295 | 1 | } |
296 | 2 | } |
297 | 2 | if (2 SymLoc == AArch64MCExpr::VK_TLSDESC && 2 !IsNC2 ) { |
298 | 2 | if (IsILP322 ) { |
299 | 2 | return ELF::R_AARCH64_P32_TLSDESC_LD32_LO12; |
300 | 0 | } else { |
301 | 0 | Ctx.reportError(Fixup.getLoc(), |
302 | 0 | "LP64 4 byte TLSDESC load/store relocation " |
303 | 0 | "not supported (ILP32 eqv: TLSDESC_LD64_LO12)"); |
304 | 0 | return ELF::R_AARCH64_NONE; |
305 | 0 | } |
306 | 0 | } |
307 | 0 |
|
308 | 0 | Ctx.reportError(Fixup.getLoc(), |
309 | 0 | "invalid fixup for 32-bit load/store instruction " |
310 | 0 | "fixup_aarch64_ldst_imm12_scale4"); |
311 | 0 | return ELF::R_AARCH64_NONE; |
312 | 88 | case AArch64::fixup_aarch64_ldst_imm12_scale8: |
313 | 88 | if (SymLoc == AArch64MCExpr::VK_ABS && 88 IsNC17 ) |
314 | 17 | return 17 R_CLS17 (LDST64_ABS_LO12_NC); |
315 | 71 | if (71 SymLoc == AArch64MCExpr::VK_GOT && 71 IsNC18 ) { |
316 | 18 | if (!IsILP3218 ) { |
317 | 16 | return ELF::R_AARCH64_LD64_GOT_LO12_NC; |
318 | 0 | } else { |
319 | 2 | Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store " |
320 | 2 | "relocation not supported (LP64 eqv: " |
321 | 2 | "LD64_GOT_LO12_NC)"); |
322 | 2 | return ELF::R_AARCH64_NONE; |
323 | 2 | } |
324 | 53 | } |
325 | 53 | if (53 SymLoc == AArch64MCExpr::VK_DTPREL && 53 !IsNC8 ) |
326 | 4 | return 4 R_CLS4 (TLSLD_LDST64_DTPREL_LO12); |
327 | 49 | if (49 SymLoc == AArch64MCExpr::VK_DTPREL && 49 IsNC4 ) |
328 | 4 | return 4 R_CLS4 (TLSLD_LDST64_DTPREL_LO12_NC); |
329 | 45 | if (45 SymLoc == AArch64MCExpr::VK_TPREL && 45 !IsNC8 ) |
330 | 4 | return 4 R_CLS4 (TLSLE_LDST64_TPREL_LO12); |
331 | 41 | if (41 SymLoc == AArch64MCExpr::VK_TPREL && 41 IsNC4 ) |
332 | 4 | return 4 R_CLS4 (TLSLE_LDST64_TPREL_LO12_NC); |
333 | 37 | if (37 SymLoc == AArch64MCExpr::VK_GOTTPREL && 37 IsNC16 ) { |
334 | 16 | if (!IsILP3216 ) { |
335 | 12 | return ELF::R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC; |
336 | 0 | } else { |
337 | 4 | Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store " |
338 | 4 | "relocation not supported (LP64 eqv: " |
339 | 4 | "TLSIE_LD64_GOTTPREL_LO12_NC)"); |
340 | 4 | return ELF::R_AARCH64_NONE; |
341 | 4 | } |
342 | 21 | } |
343 | 21 | if (21 SymLoc == AArch64MCExpr::VK_TLSDESC21 ) { |
344 | 21 | if (!IsILP3221 ) { |
345 | 21 | return ELF::R_AARCH64_TLSDESC_LD64_LO12; |
346 | 0 | } else { |
347 | 0 | Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store " |
348 | 0 | "relocation not supported (LP64 eqv: " |
349 | 0 | "TLSDESC_LD64_LO12)"); |
350 | 0 | return ELF::R_AARCH64_NONE; |
351 | 0 | } |
352 | 0 | } |
353 | 0 | Ctx.reportError(Fixup.getLoc(), |
354 | 0 | "invalid fixup for 64-bit load/store instruction"); |
355 | 0 | return ELF::R_AARCH64_NONE; |
356 | 20 | case AArch64::fixup_aarch64_ldst_imm12_scale16: |
357 | 20 | if (SymLoc == AArch64MCExpr::VK_ABS && 20 IsNC8 ) |
358 | 8 | return 8 R_CLS8 (LDST128_ABS_LO12_NC); |
359 | 12 | if (12 SymLoc == AArch64MCExpr::VK_DTPREL && 12 !IsNC6 ) |
360 | 3 | return 3 R_CLS3 (TLSLD_LDST128_DTPREL_LO12); |
361 | 9 | if (9 SymLoc == AArch64MCExpr::VK_DTPREL && 9 IsNC3 ) |
362 | 3 | return 3 R_CLS3 (TLSLD_LDST128_DTPREL_LO12_NC); |
363 | 6 | if (6 SymLoc == AArch64MCExpr::VK_TPREL && 6 !IsNC6 ) |
364 | 3 | return 3 R_CLS3 (TLSLE_LDST128_TPREL_LO12); |
365 | 3 | if (3 SymLoc == AArch64MCExpr::VK_TPREL && 3 IsNC3 ) |
366 | 3 | return 3 R_CLS3 (TLSLE_LDST128_TPREL_LO12_NC); |
367 | 0 |
|
368 | 0 | Ctx.reportError(Fixup.getLoc(), |
369 | 0 | "invalid fixup for 128-bit load/store instruction"); |
370 | 0 | return ELF::R_AARCH64_NONE; |
371 | 0 | // ILP32 case not reached here, tested with isNonILP32reloc |
372 | 94 | case AArch64::fixup_aarch64_movw: |
373 | 94 | if (RefKind == AArch64MCExpr::VK_ABS_G3) |
374 | 7 | return ELF::R_AARCH64_MOVW_UABS_G3; |
375 | 87 | if (87 RefKind == AArch64MCExpr::VK_ABS_G287 ) |
376 | 2 | return ELF::R_AARCH64_MOVW_UABS_G2; |
377 | 85 | if (85 RefKind == AArch64MCExpr::VK_ABS_G2_S85 ) |
378 | 2 | return ELF::R_AARCH64_MOVW_SABS_G2; |
379 | 83 | if (83 RefKind == AArch64MCExpr::VK_ABS_G2_NC83 ) |
380 | 5 | return ELF::R_AARCH64_MOVW_UABS_G2_NC; |
381 | 78 | if (78 RefKind == AArch64MCExpr::VK_ABS_G178 ) |
382 | 2 | return 2 R_CLS2 (MOVW_UABS_G1); |
383 | 76 | if (76 RefKind == AArch64MCExpr::VK_ABS_G1_S76 ) |
384 | 2 | return ELF::R_AARCH64_MOVW_SABS_G1; |
385 | 74 | if (74 RefKind == AArch64MCExpr::VK_ABS_G1_NC74 ) |
386 | 5 | return ELF::R_AARCH64_MOVW_UABS_G1_NC; |
387 | 69 | if (69 RefKind == AArch64MCExpr::VK_ABS_G069 ) |
388 | 2 | return 2 R_CLS2 (MOVW_UABS_G0); |
389 | 67 | if (67 RefKind == AArch64MCExpr::VK_ABS_G0_S67 ) |
390 | 2 | return 2 R_CLS2 (MOVW_SABS_G0); |
391 | 65 | if (65 RefKind == AArch64MCExpr::VK_ABS_G0_NC65 ) |
392 | 5 | return 5 R_CLS5 (MOVW_UABS_G0_NC); |
393 | 60 | if (60 RefKind == AArch64MCExpr::VK_DTPREL_G260 ) |
394 | 6 | return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G2; |
395 | 54 | if (54 RefKind == AArch64MCExpr::VK_DTPREL_G154 ) |
396 | 7 | return 7 R_CLS7 (TLSLD_MOVW_DTPREL_G1); |
397 | 47 | if (47 RefKind == AArch64MCExpr::VK_DTPREL_G1_NC47 ) |
398 | 4 | return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G1_NC; |
399 | 43 | if (43 RefKind == AArch64MCExpr::VK_DTPREL_G043 ) |
400 | 7 | return 7 R_CLS7 (TLSLD_MOVW_DTPREL_G0); |
401 | 36 | if (36 RefKind == AArch64MCExpr::VK_DTPREL_G0_NC36 ) |
402 | 4 | return 4 R_CLS4 (TLSLD_MOVW_DTPREL_G0_NC); |
403 | 32 | if (32 RefKind == AArch64MCExpr::VK_TPREL_G232 ) |
404 | 4 | return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G2; |
405 | 28 | if (28 RefKind == AArch64MCExpr::VK_TPREL_G128 ) |
406 | 7 | return 7 R_CLS7 (TLSLE_MOVW_TPREL_G1); |
407 | 21 | if (21 RefKind == AArch64MCExpr::VK_TPREL_G1_NC21 ) |
408 | 4 | return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G1_NC; |
409 | 17 | if (17 RefKind == AArch64MCExpr::VK_TPREL_G017 ) |
410 | 7 | return 7 R_CLS7 (TLSLE_MOVW_TPREL_G0); |
411 | 10 | if (10 RefKind == AArch64MCExpr::VK_TPREL_G0_NC10 ) |
412 | 4 | return 4 R_CLS4 (TLSLE_MOVW_TPREL_G0_NC); |
413 | 6 | if (6 RefKind == AArch64MCExpr::VK_GOTTPREL_G16 ) |
414 | 3 | return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G1; |
415 | 3 | if (3 RefKind == AArch64MCExpr::VK_GOTTPREL_G0_NC3 ) |
416 | 3 | return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC; |
417 | 0 | Ctx.reportError(Fixup.getLoc(), |
418 | 0 | "invalid fixup for movz/movk instruction"); |
419 | 0 | return ELF::R_AARCH64_NONE; |
420 | 19 | case AArch64::fixup_aarch64_tlsdesc_call: |
421 | 19 | return R_CLS(TLSDESC_CALL); |
422 | 0 | default: |
423 | 0 | Ctx.reportError(Fixup.getLoc(), "Unknown ELF relocation type"); |
424 | 0 | return ELF::R_AARCH64_NONE; |
425 | 0 | } |
426 | 0 | } |
427 | 0 |
|
428 | 0 | llvm_unreachable0 ("Unimplemented fixup -> relocation"); |
429 | 0 | } |
430 | | |
431 | | MCObjectWriter *llvm::createAArch64ELFObjectWriter(raw_pwrite_stream &OS, |
432 | | uint8_t OSABI, |
433 | | bool IsLittleEndian, |
434 | 159 | bool IsILP32) { |
435 | 159 | MCELFObjectTargetWriter *MOTW = |
436 | 159 | new AArch64ELFObjectWriter(OSABI, IsLittleEndian, IsILP32); |
437 | 159 | return createELFObjectWriter(MOTW, OS, IsLittleEndian); |
438 | 159 | } |