Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/Target/ARM/MCTargetDesc/ARMTargetStreamer.cpp
Line
Count
Source
1
//===- ARMTargetStreamer.cpp - ARMTargetStreamer class --*- C++ -*---------===//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
//
9
// This file implements the ARMTargetStreamer class.
10
//
11
//===----------------------------------------------------------------------===//
12
13
#include "MCTargetDesc/ARMMCTargetDesc.h"
14
#include "llvm/MC/ConstantPools.h"
15
#include "llvm/MC/MCAsmInfo.h"
16
#include "llvm/MC/MCContext.h"
17
#include "llvm/MC/MCExpr.h"
18
#include "llvm/MC/MCStreamer.h"
19
#include "llvm/MC/MCSubtargetInfo.h"
20
#include "llvm/Support/ARMBuildAttributes.h"
21
#include "llvm/Support/TargetParser.h"
22
23
using namespace llvm;
24
25
//
26
// ARMTargetStreamer Implementation
27
//
28
29
ARMTargetStreamer::ARMTargetStreamer(MCStreamer &S)
30
7.23k
    : MCTargetStreamer(S), ConstantPools(new AssemblerConstantPools()) {}
31
32
7.20k
ARMTargetStreamer::~ARMTargetStreamer() = default;
33
34
// The constant pool handling is shared by all ARMTargetStreamer
35
// implementations.
36
292
const MCExpr *ARMTargetStreamer::addConstantPoolEntry(const MCExpr *Expr, SMLoc Loc) {
37
292
  return ConstantPools->addEntry(Streamer, Expr, 4, Loc);
38
292
}
39
40
44
void ARMTargetStreamer::emitCurrentConstantPool() {
41
44
  ConstantPools->emitForCurrentSection(Streamer);
42
44
  ConstantPools->clearCacheForCurrentSection(Streamer);
43
44
}
44
45
// finish() - write out any non-empty assembler constant pools.
46
6.60k
void ARMTargetStreamer::finish() { ConstantPools->emitAll(Streamer); }
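
The three callbacks above (addConstantPoolEntry, emitCurrentConstantPool and finish) carry the shared literal-pool bookkeeping mentioned in the comment: entries accumulate per section, an explicit flush clears one section's pool, and end-of-assembly emission drains whatever is left. As a rough, self-contained illustration of that flow only, the minimal sketch below uses plain C++ with made-up names (PoolEntry, ConstantPoolModel); it is not the real AssemblerConstantPools/MCStreamer API.

#include <cstdio>
#include <map>
#include <string>
#include <vector>

// Illustrative stand-ins; the real code tracks MCSection/MCSymbol/MCExpr.
struct PoolEntry { std::string Label; long Value; };

class ConstantPoolModel {
  std::map<std::string, std::vector<PoolEntry>> PerSection;
  unsigned NextLabel = 0;

public:
  // Like addConstantPoolEntry(): record the value and hand back a label the
  // load instruction can reference instead of the literal itself.
  std::string addEntry(const std::string &Section, long Value) {
    std::string Label = ".LCPI" + std::to_string(NextLabel++);
    PerSection[Section].push_back({Label, Value});
    return Label;
  }

  // Like emitCurrentConstantPool(): flush and forget one section's pool,
  // e.g. on an explicit .ltorg directive.
  void emitForSection(const std::string &Section) {
    for (const PoolEntry &E : PerSection[Section])
      std::printf("%s: .long %ld\n", E.Label.c_str(), E.Value);
    PerSection.erase(Section);
  }

  // Like finish(): at the end of assembly, flush every non-empty pool.
  void emitAll() {
    for (auto &S : PerSection)
      for (const PoolEntry &E : S.second)
        std::printf("%s: .long %ld\n", E.Label.c_str(), E.Value);
    PerSection.clear();
  }
};

int main() {
  ConstantPoolModel Pools;
  std::string L = Pools.addEntry(".text", 0x12345678); // ldr r0, =0x12345678
  std::printf("ldr r0, %s\n", L.c_str());
  Pools.emitForSection(".text"); // .ltorg-style flush
  Pools.addEntry(".text.other", 42);
  Pools.emitAll();               // end-of-file flush
}
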
47
48
// reset() - Reset any state
49
0
void ARMTargetStreamer::reset() {}
50
51
4
void ARMTargetStreamer::emitInst(uint32_t Inst, char Suffix) {
52
4
  unsigned Size;
53
4
  char Buffer[4];
54
4
  const bool LittleEndian = getStreamer().getContext().getAsmInfo()->isLittleEndian();
55
4
56
4
  switch (Suffix) {
57
4
  case '\0':
58
0
    Size = 4;
59
0
60
0
    for (unsigned II = 0, IE = Size; II != IE; II++) {
61
0
      const unsigned I = LittleEndian ? (Size - II - 1) : II;
62
0
      Buffer[Size - II - 1] = uint8_t(Inst >> I * CHAR_BIT);
63
0
    }
64
0
65
0
    break;
66
4
  case 'n':
67
4
  case 'w':
68
4
    Size = (Suffix == 'n' ? 2 : 4);
69
4
70
4
    // Thumb wide instructions are emitted as a pair of 16-bit words of the
71
4
    // appropriate endianness.
72
10
    for (unsigned II = 0, IE = Size; II != IE; II = II + 2) {
73
6
      const unsigned I0 = LittleEndian ? II + 0 : II + 1;
74
6
      const unsigned I1 = LittleEndian ? II + 1 : II + 0;
75
6
      Buffer[Size - II - 2] = uint8_t(Inst >> I0 * CHAR_BIT);
76
6
      Buffer[Size - II - 1] = uint8_t(Inst >> I1 * CHAR_BIT);
77
6
    }
78
4
79
4
    break;
80
4
  default:
81
0
    llvm_unreachable("Invalid Suffix");
82
4
  }
83
4
  getStreamer().EmitBytes(StringRef(Buffer, Size));
84
4
}
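
To see what the 'w' branch above actually writes, the standalone sketch below mirrors only its byte-placement loop (encodeWide is a made-up name; nothing here is the streamer itself). It shows that the halfword held in the upper 16 bits of Inst is emitted first, with the bytes of each halfword ordered by the target endianness.

#include <climits>
#include <cstdint>
#include <cstdio>

// Mirrors the Suffix == 'w' loop: emit two 16-bit halfwords, high halfword
// of Inst first, bytes within each halfword in target byte order.
static void encodeWide(uint32_t Inst, bool LittleEndian, uint8_t Buffer[4]) {
  const unsigned Size = 4;
  for (unsigned II = 0; II != Size; II = II + 2) {
    const unsigned I0 = LittleEndian ? II + 0 : II + 1;
    const unsigned I1 = LittleEndian ? II + 1 : II + 0;
    Buffer[Size - II - 2] = uint8_t(Inst >> I0 * CHAR_BIT);
    Buffer[Size - II - 1] = uint8_t(Inst >> I1 * CHAR_BIT);
  }
}

int main() {
  uint8_t LE[4], BE[4];
  encodeWide(0xAABBCCDDu, /*LittleEndian=*/true, LE);  // LE becomes {0xBB, 0xAA, 0xDD, 0xCC}
  encodeWide(0xAABBCCDDu, /*LittleEndian=*/false, BE); // BE becomes {0xAA, 0xBB, 0xCC, 0xDD}
  std::printf("LE: %02x %02x %02x %02x\n", LE[0], LE[1], LE[2], LE[3]);
  std::printf("BE: %02x %02x %02x %02x\n", BE[0], BE[1], BE[2], BE[3]);
}
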
85
86
// The remaining callbacks should be handled separately by each
87
// streamer.
88
3
void ARMTargetStreamer::emitFnStart() {}
89
1
void ARMTargetStreamer::emitFnEnd() {}
90
0
void ARMTargetStreamer::emitCantUnwind() {}
91
0
void ARMTargetStreamer::emitPersonality(const MCSymbol *Personality) {}
92
0
void ARMTargetStreamer::emitPersonalityIndex(unsigned Index) {}
93
0
void ARMTargetStreamer::emitHandlerData() {}
94
void ARMTargetStreamer::emitSetFP(unsigned FpReg, unsigned SpReg,
95
0
                                  int64_t Offset) {}
96
0
void ARMTargetStreamer::emitMovSP(unsigned Reg, int64_t Offset) {}
97
0
void ARMTargetStreamer::emitPad(int64_t Offset) {}
98
void ARMTargetStreamer::emitRegSave(const SmallVectorImpl<unsigned> &RegList,
99
0
                                    bool isVector) {}
100
void ARMTargetStreamer::emitUnwindRaw(int64_t StackOffset,
101
0
                                      const SmallVectorImpl<uint8_t> &Opcodes) {
102
0
}
103
653
void ARMTargetStreamer::switchVendor(StringRef Vendor) {}
104
4.33k
void ARMTargetStreamer::emitAttribute(unsigned Attribute, unsigned Value) {}
105
void ARMTargetStreamer::emitTextAttribute(unsigned Attribute,
106
417
                                          StringRef String) {}
107
void ARMTargetStreamer::emitIntTextAttribute(unsigned Attribute,
108
                                             unsigned IntValue,
109
0
                                             StringRef StringValue) {}
110
0
void ARMTargetStreamer::emitArch(ARM::ArchKind Arch) {}
111
2
void ARMTargetStreamer::emitArchExtension(unsigned ArchExt) {}
112
0
void ARMTargetStreamer::emitObjectArch(ARM::ArchKind Arch) {}
113
485
void ARMTargetStreamer::emitFPU(unsigned FPU) {}
114
2.19k
void ARMTargetStreamer::finishAttributeSection() {}
115
void
116
0
ARMTargetStreamer::AnnotateTLSDescriptorSequence(const MCSymbolRefExpr *SRE) {}
117
0
void ARMTargetStreamer::emitThumbSet(MCSymbol *Symbol, const MCExpr *Value) {}
118
119
2.90k
static ARMBuildAttrs::CPUArch getArchForCPU(const MCSubtargetInfo &STI) {
120
2.90k
  if (STI.getCPU() == "xscale")
121
1
    return ARMBuildAttrs::v5TEJ;
122
2.90k
123
2.90k
  if (STI.hasFeature(ARM::HasV8Ops)) {
124
198
    if (STI.hasFeature(ARM::FeatureRClass))
125
10
      return ARMBuildAttrs::v8_R;
126
188
    return ARMBuildAttrs::v8_A;
127
2.70k
  } else if (STI.hasFeature(ARM::HasV8_1MMainlineOps))
128
68
    return ARMBuildAttrs::v8_1_M_Main;
129
2.63k
  else if (STI.hasFeature(ARM::HasV8MMainlineOps))
130
66
    return ARMBuildAttrs::v8_M_Main;
131
2.57k
  else if (STI.hasFeature(ARM::HasV7Ops)) {
132
1.61k
    if (STI.hasFeature(ARM::FeatureMClass) && STI.hasFeature(ARM::FeatureDSP))
133
162
      return ARMBuildAttrs::v7E_M;
134
1.45k
    return ARMBuildAttrs::v7;
135
1.45k
  } else if (STI.hasFeature(ARM::HasV6T2Ops))
136
115
    return ARMBuildAttrs::v6T2;
137
840
  else if (STI.hasFeature(ARM::HasV8MBaselineOps))
138
29
    return ARMBuildAttrs::v8_M_Base;
139
811
  else if (STI.hasFeature(ARM::HasV6MOps))
140
101
    return ARMBuildAttrs::v6S_M;
141
710
  else if (STI.hasFeature(ARM::HasV6Ops))
142
95
    return ARMBuildAttrs::v6;
143
615
  else if (STI.hasFeature(ARM::HasV5TEOps))
144
13
    return ARMBuildAttrs::v5TE;
145
602
  else if (STI.hasFeature(ARM::HasV5TOps))
146
21
    return ARMBuildAttrs::v5T;
147
581
  else if (STI.hasFeature(ARM::HasV4TOps))
148
110
    return ARMBuildAttrs::v4T;
149
471
  else
150
471
    return ARMBuildAttrs::v4;
151
2.90k
}
152
153
4.83k
static bool isV8M(const MCSubtargetInfo &STI) {
154
4.83k
  // Note that v8M Baseline is a subset of v6T2!
155
4.83k
  return (STI.hasFeature(ARM::HasV8MBaselineOps) &&
156
4.83k
          !STI.hasFeature(ARM::HasV6T2Ops)) ||
157
4.83k
         STI.hasFeature(ARM::HasV8MMainlineOps);
158
4.83k
}
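
As the comment notes, v8-M Baseline is a subset of v6T2, so HasV8MBaselineOps alone cannot tell a genuine v8-M Baseline target apart from a v6T2-or-later core that carries the bit by implication; the !HasV6T2Ops term supplies that distinction. A minimal standalone illustration of the predicate, with plain booleans standing in for the subtarget feature bits (the Features struct is illustrative only):

#include <cstdio>

// Plain-bool stand-in for the feature queries used by isV8M() above.
struct Features {
  bool HasV8MBaselineOps, HasV6T2Ops, HasV8MMainlineOps;
};

static bool isV8M(const Features &F) {
  return (F.HasV8MBaselineOps && !F.HasV6T2Ops) || F.HasV8MMainlineOps;
}

int main() {
  // A v8-M Baseline core: baseline ops but no v6T2 ops -> treated as v8-M.
  Features V8MBase{true, false, false};
  // A v7-class core: the baseline ops are implied (subset of v6T2), but the
  // v6T2 bit excludes it -> not treated as v8-M.
  Features V7{true, true, false};
  // A v8-M Mainline core is always v8-M, whatever else is implied.
  Features V8MMain{true, true, true};
  std::printf("%d %d %d\n", isV8M(V8MBase), isV8M(V7), isV8M(V8MMain)); // 1 0 1
}
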
159
160
/// Emit the build attributes that only depend on the hardware that we expect
161
/// to be available, and not on the ABI, or any source-language choices.
162
2.90k
void ARMTargetStreamer::emitTargetAttributes(const MCSubtargetInfo &STI) {
163
2.90k
  switchVendor("aeabi");
164
2.90k
165
2.90k
  const StringRef CPUString = STI.getCPU();
166
2.90k
  if (!CPUString.empty() && !CPUString.startswith("generic")) {
167
981
    // FIXME: remove krait check when GNU tools support krait cpu
168
981
    if (STI.hasFeature(ARM::ProcKrait)) {
169
6
      emitTextAttribute(ARMBuildAttrs::CPU_name, "cortex-a9");
170
6
      // We consider krait as a "cortex-a9" + hwdiv CPU
171
6
      // Enable hwdiv through ".arch_extension idiv"
172
6
      if (STI.hasFeature(ARM::FeatureHWDivThumb) ||
173
6
          STI.hasFeature(ARM::FeatureHWDivARM))
174
5
        emitArchExtension(ARM::AEK_HWDIVTHUMB | ARM::AEK_HWDIVARM);
175
975
    } else {
176
975
      emitTextAttribute(ARMBuildAttrs::CPU_name, CPUString);
177
975
    }
178
981
  }
179
2.90k
180
2.90k
  emitAttribute(ARMBuildAttrs::CPU_arch, getArchForCPU(STI));
181
2.90k
182
2.90k
  if (STI.hasFeature(ARM::FeatureAClass)) {
183
1.48k
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
184
1.48k
                      ARMBuildAttrs::ApplicationProfile);
185
1.48k
  } else if (STI.hasFeature(ARM::FeatureRClass)) {
186
23
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
187
23
                      ARMBuildAttrs::RealTimeProfile);
188
1.39k
  } else if (STI.hasFeature(ARM::FeatureMClass)) {
189
537
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
190
537
                      ARMBuildAttrs::MicroControllerProfile);
191
537
  }
192
2.90k
193
2.90k
  emitAttribute(ARMBuildAttrs::ARM_ISA_use, STI.hasFeature(ARM::FeatureNoARM)
194
2.90k
                                                ? ARMBuildAttrs::Not_Allowed
195
2.90k
                                                : ARMBuildAttrs::Allowed);
196
2.90k
197
2.90k
  if (isV8M(STI)) {
198
163
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use,
199
163
                      ARMBuildAttrs::AllowThumbDerived);
200
2.74k
  } else if (STI.hasFeature(ARM::FeatureThumb2)) {
201
1.92k
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use,
202
1.92k
                      ARMBuildAttrs::AllowThumb32);
203
1.92k
  } else if (STI.hasFeature(ARM::HasV4TOps)) {
204
341
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use, ARMBuildAttrs::Allowed);
205
341
  }
206
2.90k
207
2.90k
  if (STI.hasFeature(ARM::FeatureNEON)) {
208
1.50k
    /* NEON is not exactly a VFP architecture, but GAS emits one of
209
1.50k
     * neon/neon-fp-armv8/neon-vfpv4/vfpv3/vfpv2 for .fpu parameters */
210
1.50k
    if (STI.hasFeature(ARM::FeatureFPARMv8)) {
211
186
      if (STI.hasFeature(ARM::FeatureCrypto))
212
176
        emitFPU(ARM::FK_CRYPTO_NEON_FP_ARMV8);
213
10
      else
214
10
        emitFPU(ARM::FK_NEON_FP_ARMV8);
215
1.32k
    } else if (STI.hasFeature(ARM::FeatureVFP4))
216
372
      emitFPU(ARM::FK_NEON_VFPV4);
217
950
    else
218
950
      emitFPU(STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_NEON_FP16
219
950
                                               : ARM::FK_NEON);
220
1.50k
    // Emit Tag_Advanced_SIMD_arch for ARMv8 architecture
221
1.50k
    if (STI.hasFeature(ARM::HasV8Ops))
222
193
      emitAttribute(ARMBuildAttrs::Advanced_SIMD_arch,
223
193
                    STI.hasFeature(ARM::HasV8_1aOps)
224
193
                        ? ARMBuildAttrs::AllowNeonARMv8_1a
225
193
                        : ARMBuildAttrs::AllowNeonARMv8);
226
1.50k
  } else {
227
1.39k
    if (STI.hasFeature(ARM::FeatureFPARMv8_D16_SP))
228
97
      // FPv5 and FP-ARMv8 have the same instructions, so are modeled as one
229
97
      // FPU, but there are two different names for it depending on the CPU.
230
97
      emitFPU(STI.hasFeature(ARM::FeatureD32)
231
97
                  ? ARM::FK_FP_ARMV8
232
97
                  : (STI.hasFeature(ARM::FeatureFP64) ? ARM::FK_FPV5_D16
233
79
                                                      : ARM::FK_FPV5_SP_D16));
234
1.29k
    else if (STI.hasFeature(ARM::FeatureVFP4_D16_SP))
235
84
      emitFPU(STI.hasFeature(ARM::FeatureD32)
236
84
                  ? ARM::FK_VFPV4
237
84
                  : (STI.hasFeature(ARM::FeatureFP64) ? ARM::FK_VFPV4_D16
238
79
                                                      : ARM::FK_FPV4_SP_D16));
239
1.21k
    else if (STI.hasFeature(ARM::FeatureVFP3_D16_SP))
240
39
      emitFPU(
241
39
          STI.hasFeature(ARM::FeatureD32)
242
39
              // +d32
243
39
              ? (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3_FP16
244
24
                                                  : ARM::FK_VFPV3)
245
39
              // -d32
246
39
              : (STI.hasFeature(ARM::FeatureFP64)
247
15
                     ? (STI.hasFeature(ARM::FeatureFP16)
248
13
                            ? ARM::FK_VFPV3_D16_FP16
249
13
                            : ARM::FK_VFPV3_D16)
250
15
                     : (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3XD_FP16
251
2
                                                         : ARM::FK_VFPV3XD)));
252
1.17k
    else if (STI.hasFeature(ARM::FeatureVFP2_D16_SP))
253
59
      emitFPU(ARM::FK_VFPV2);
254
1.39k
  }
255
2.90k
256
2.90k
  // ABI_HardFP_use attribute to indicate single precision FP.
257
2.90k
  if (STI.hasFeature(ARM::FeatureVFP2_D16_SP) && !STI.hasFeature(ARM::FeatureFP64))
258
147
    emitAttribute(ARMBuildAttrs::ABI_HardFP_use,
259
147
                  ARMBuildAttrs::HardFPSinglePrecision);
260
2.90k
261
2.90k
  if (STI.hasFeature(ARM::FeatureFP16))
262
816
    emitAttribute(ARMBuildAttrs::FP_HP_extension, ARMBuildAttrs::AllowHPFP);
263
2.90k
264
2.90k
  if (STI.hasFeature(ARM::FeatureMP))
265
610
    emitAttribute(ARMBuildAttrs::MPextension_use, ARMBuildAttrs::AllowMP);
266
2.90k
267
2.90k
  if (STI.hasFeature(ARM::HasMVEFloatOps))
268
20
    emitAttribute(ARMBuildAttrs::MVE_arch, ARMBuildAttrs::AllowMVEIntegerAndFloat);
269
2.88k
  else if (STI.hasFeature(ARM::HasMVEIntegerOps))
270
35
    emitAttribute(ARMBuildAttrs::MVE_arch, ARMBuildAttrs::AllowMVEInteger);
271
2.90k
272
2.90k
  // Hardware divide in ARM mode is part of base arch, starting from ARMv8.
273
2.90k
  // If only Thumb hwdiv is present, it must also be in base arch (ARMv7-R/M).
274
2.90k
  // It is not possible to produce DisallowDIV: if hwdiv is present in the base
275
2.90k
  // arch, supplying -hwdiv downgrades the effective arch, via ClearImpliedBits.
276
2.90k
  // AllowDIVExt is only emitted if hwdiv isn't available in the base arch;
277
2.90k
  // otherwise, the default value (AllowDIVIfExists) applies.
278
2.90k
  if (STI.hasFeature(ARM::FeatureHWDivARM) && !STI.hasFeature(ARM::HasV8Ops))
279
389
    emitAttribute(ARMBuildAttrs::DIV_use, ARMBuildAttrs::AllowDIVExt);
280
2.90k
281
2.90k
  if (STI.hasFeature(ARM::FeatureDSP) && isV8M(STI))
282
96
    emitAttribute(ARMBuildAttrs::DSP_extension, ARMBuildAttrs::Allowed);
283
2.90k
284
2.90k
  if (STI.hasFeature(ARM::FeatureStrictAlign))
285
567
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
286
567
                  ARMBuildAttrs::Not_Allowed);
287
2.33k
  else
288
2.33k
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
289
2.33k
                  ARMBuildAttrs::Allowed);
290
2.90k
291
2.90k
  if (STI.hasFeature(ARM::FeatureTrustZone) &&
292
2.90k
      STI.hasFeature(ARM::FeatureVirtualization))
293
430
    emitAttribute(ARMBuildAttrs::Virtualization_use,
294
430
                  ARMBuildAttrs::AllowTZVirtualization);
295
2.47k
  else if (STI.hasFeature(ARM::FeatureTrustZone))
296
154
    emitAttribute(ARMBuildAttrs::Virtualization_use, ARMBuildAttrs::AllowTZ);
297
2.31k
  else if (STI.hasFeature(ARM::FeatureVirtualization))
298
10
    emitAttribute(ARMBuildAttrs::Virtualization_use,
299
10
                  ARMBuildAttrs::AllowVirtualization);
300
2.90k
}