Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/include/llvm/CodeGen/GlobalISel/CallLowering.h

Format of the listing below: source line | execution count | source (the count column is blank for lines with no recorded count).
   1 |        | //===- llvm/CodeGen/GlobalISel/CallLowering.h - Call lowering ---*- C++ -*-===//
   2 |        | //
   3 |        | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
   4 |        | // See https://llvm.org/LICENSE.txt for license information.
   5 |        | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
   6 |        | //
   7 |        | //===----------------------------------------------------------------------===//
   8 |        | ///
   9 |        | /// \file
  10 |        | /// This file describes how to lower LLVM calls to machine code calls.
  11 |        | ///
  12 |        | //===----------------------------------------------------------------------===//
  13 |        |
  14 |        | #ifndef LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H
  15 |        | #define LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H
  16 |        |
  17 |        | #include "llvm/ADT/ArrayRef.h"
  18 |        | #include "llvm/ADT/SmallVector.h"
  19 |        | #include "llvm/CodeGen/CallingConvLower.h"
  20 |        | #include "llvm/CodeGen/TargetCallingConv.h"
  21 |        | #include "llvm/IR/CallSite.h"
  22 |        | #include "llvm/IR/CallingConv.h"
  23 |        | #include "llvm/Support/ErrorHandling.h"
  24 |        | #include "llvm/Support/MachineValueType.h"
  25 |        | #include <cstdint>
  26 |        | #include <functional>
  27 |        |
  28 |        | namespace llvm {
  29 |        |
  30 |        | class CCState;
  31 |        | class DataLayout;
  32 |        | class Function;
  33 |        | class MachineIRBuilder;
  34 |        | class MachineOperand;
  35 |        | struct MachinePointerInfo;
  36 |        | class MachineRegisterInfo;
  37 |        | class TargetLowering;
  38 |        | class Type;
  39 |        | class Value;
  40 |        |
  41 |        | class CallLowering {
  42 |        |   const TargetLowering *TLI;
  43 |        |
  44 |        |   virtual void anchor();
  45 |        | public:
  46 |        |   struct ArgInfo {
  47 |        |     SmallVector<Register, 4> Regs;
  48 |        |     Type *Ty;
  49 |        |     ISD::ArgFlagsTy Flags;
  50 |        |     bool IsFixed;
  51 |        |
  52 |        |     ArgInfo(ArrayRef<Register> Regs, Type *Ty,
  53 |        |             ISD::ArgFlagsTy Flags = ISD::ArgFlagsTy{}, bool IsFixed = true)
  54 |        |         : Regs(Regs.begin(), Regs.end()), Ty(Ty), Flags(Flags),
  55 |  9.40M |           IsFixed(IsFixed) {
  56 |  9.40M |       // FIXME: We should have just one way of saying "no register".
  57 |  9.40M |       assert((Ty->isVoidTy() == (Regs.empty() || Regs[0] == 0)) &&
  58 |  9.40M |              "only void types should have no register");
  59 |  9.40M |     }
  60 |        |   };
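The constructor above enforces the "only void values carry no register" invariant at line 57. A minimal sketch of building an ArgInfo for a value that has already been split into per-element virtual registers (not from the report; the helper name and calling context are hypothetical):

    #include "llvm/CodeGen/GlobalISel/CallLowering.h"
    #include "llvm/IR/Value.h"

    // Hypothetical helper: Regs holds one vreg per base type of V, in
    // computeValueLLTs order.
    static llvm::CallLowering::ArgInfo
    makeArgInfo(llvm::ArrayRef<llvm::Register> Regs, const llvm::Value &V,
                bool IsFixed = true) {
      // A void value must come with no (or only a null) register; the ArgInfo
      // constructor asserts exactly that.
      return llvm::CallLowering::ArgInfo(Regs, V.getType(),
                                         llvm::ISD::ArgFlagsTy{}, IsFixed);
    }

The lowerReturn/lowerCall hooks later in this header consume ArgInfos of exactly this shape.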
  61 |        |
  62 |        |   /// Argument handling is mostly uniform between the four places that
  63 |        |   /// make these decisions: function formal arguments, call
  64 |        |   /// instruction args, call instruction returns and function
  65 |        |   /// returns. However, once a decision has been made on where an
  66 |        |   /// argument should go, exactly what happens can vary slightly. This
  67 |        |   /// class abstracts the differences.
  68 |        |   struct ValueHandler {
  69 |        |     ValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
  70 |        |                  CCAssignFn *AssignFn)
  71 |  2.59M |       : MIRBuilder(MIRBuilder), MRI(MRI), AssignFn(AssignFn) {}
  72 |        |
  73 |  2.59M |     virtual ~ValueHandler() = default;
  74 |        |
  75 |        |     /// Returns true if the handler is dealing with formal arguments,
  76 |        |     /// not with return values etc.
  77 |  3.02M |     virtual bool isArgumentHandler() const { return false; }
  78 |        |
  79 |        |     /// Materialize a VReg containing the address of the specified
  80 |        |     /// stack-based object. This is either based on a FrameIndex or
  81 |        |     /// direct SP manipulation, depending on the context. \p MPO
  82 |        |     /// should be initialized to an appropriate description of the
  83 |        |     /// address created.
  84 |        |     virtual Register getStackAddress(uint64_t Size, int64_t Offset,
  85 |        |                                      MachinePointerInfo &MPO) = 0;
  86 |        |
  87 |        |     /// The specified value has been assigned to a physical register,
  88 |        |     /// handle the appropriate COPY (either to or from) and mark any
  89 |        |     /// relevant uses/defines as needed.
  90 |        |     virtual void assignValueToReg(Register ValVReg, Register PhysReg,
  91 |        |                                   CCValAssign &VA) = 0;
  92 |        |
  93 |        |     /// The specified value has been assigned to a stack
  94 |        |     /// location. Load or store it there, with appropriate extension
  95 |        |     /// if necessary.
  96 |        |     virtual void assignValueToAddress(Register ValVReg, Register Addr,
  97 |        |                                       uint64_t Size, MachinePointerInfo &MPO,
  98 |        |                                       CCValAssign &VA) = 0;
  99 |        |
 100 |        |     /// Handle custom values, which may be passed into one or more of \p VAs.
 101 |        |     /// \return The number of \p VAs that have been assigned after the first
 102 |        |     ///         one, and which should therefore be skipped from further
 103 |        |     ///         processing.
 104 |        |     virtual unsigned assignCustomValue(const ArgInfo &Arg,
 105 |      0 |                                        ArrayRef<CCValAssign> VAs) {
 106 |      0 |       // This is not a pure virtual method because not all targets need to worry
 107 |      0 |       // about custom values.
 108 |      0 |       llvm_unreachable("Custom values not supported");
 109 |      0 |     }
 110 |        |
 111 |        |     Register extendRegister(Register ValReg, CCValAssign &VA);
 112 |        |
 113 |        |     virtual bool assignArg(unsigned ValNo, MVT ValVT, MVT LocVT,
 114 |        |                            CCValAssign::LocInfo LocInfo, const ArgInfo &Info,
 115 |  1.41M |                            CCState &State) {
 116 |  1.41M |       return AssignFn(ValNo, ValVT, LocVT, LocInfo, Info.Flags, State);
 117 |  1.41M |     }
 118 |        |
 119 |        |     MachineIRBuilder &MIRBuilder;
 120 |        |     MachineRegisterInfo &MRI;
 121 |        |     CCAssignFn *AssignFn;
 122 |        |
 123 |        |   private:
 124 |        |     virtual void anchor();
 125 |        |   };
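The comment at lines 62-67 is the reason ValueHandler exists: the same assignment loop serves formal arguments, call arguments, call returns and function returns, and only the copy/load/store step differs per context. As an illustration, a bare-bones handler for incoming (formal) arguments might override the three pure virtual hooks as sketched below. This is a hedged sketch, not LLVM code: it assumes 64-bit pointers in address space 0, ignores alignment and extension (see extendRegister), and is only loosely modelled on the in-tree incoming-argument handlers.

    #include "llvm/CodeGen/GlobalISel/CallLowering.h"
    #include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
    #include "llvm/CodeGen/LowLevelType.h"
    #include "llvm/CodeGen/MachineFrameInfo.h"
    #include "llvm/CodeGen/MachineFunction.h"
    #include "llvm/CodeGen/MachineMemOperand.h"

    using namespace llvm;

    namespace {
    // Hypothetical handler for incoming (formal) arguments.
    struct SimpleIncomingHandler : CallLowering::ValueHandler {
      SimpleIncomingHandler(MachineIRBuilder &MIRBuilder,
                            MachineRegisterInfo &MRI, CCAssignFn *AssignFn)
          : ValueHandler(MIRBuilder, MRI, AssignFn) {}

      bool isArgumentHandler() const override { return true; }

      Register getStackAddress(uint64_t Size, int64_t Offset,
                               MachinePointerInfo &MPO) override {
        // Incoming stack arguments live in fixed frame objects at known offsets.
        MachineFunction &MF = MIRBuilder.getMF();
        int FI = MF.getFrameInfo().CreateFixedObject(Size, Offset,
                                                     /*IsImmutable=*/true);
        MPO = MachinePointerInfo::getFixedStack(MF, FI);
        Register Addr = MRI.createGenericVirtualRegister(
            LLT::pointer(0, /*SizeInBits=*/64)); // assumes 64-bit pointers
        MIRBuilder.buildFrameIndex(Addr, FI);
        return Addr;
      }

      void assignValueToReg(Register ValVReg, Register PhysReg,
                            CCValAssign &VA) override {
        // The physical register is live into the function; copy it to the vreg.
        MIRBuilder.getMBB().addLiveIn(PhysReg);
        MIRBuilder.buildCopy(ValVReg, PhysReg);
      }

      void assignValueToAddress(Register ValVReg, Register Addr, uint64_t Size,
                                MachinePointerInfo &MPO,
                                CCValAssign &VA) override {
        auto *MMO = MIRBuilder.getMF().getMachineMemOperand(
            MPO, MachineMemOperand::MOLoad, Size, /*Alignment=*/1);
        MIRBuilder.buildLoad(ValVReg, Addr, *MMO);
      }
    };
    } // end anonymous namespace

An outgoing handler follows the same shape in the other direction: it copies vregs into the assigned physical registers (recording them as implicit uses of the call or return instruction) and stores stack-passed values relative to the stack pointer.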
 126 |        |
 127 |        | protected:
 128 |        |   /// Getter for generic TargetLowering class.
 129 |     11 |   const TargetLowering *getTLI() const {
 130 |     11 |     return TLI;
 131 |     11 |   }
 132 |        |
 133 |        |   /// Getter for target specific TargetLowering class.
 134 |        |   template <class XXXTargetLowering>
 135 |  6.24M |     const XXXTargetLowering *getTLI() const {
 136 |  6.24M |     return static_cast<const XXXTargetLowering *>(TLI);
 137 |  6.24M |   }

  Instantiation: llvm::AArch64TargetLowering const* llvm::CallLowering::getTLI<llvm::AArch64TargetLowering>() const
 135 |  6.24M |     const XXXTargetLowering *getTLI() const {
 136 |  6.24M |     return static_cast<const XXXTargetLowering *>(TLI);
 137 |  6.24M |   }

  Instantiation: llvm::SITargetLowering const* llvm::CallLowering::getTLI<llvm::SITargetLowering>() const
 135 |    570 |     const XXXTargetLowering *getTLI() const {
 136 |    570 |     return static_cast<const XXXTargetLowering *>(TLI);
 137 |    570 |   }

  Instantiation: llvm::AMDGPUTargetLowering const* llvm::CallLowering::getTLI<llvm::AMDGPUTargetLowering>() const
 135 |      3 |     const XXXTargetLowering *getTLI() const {
 136 |      3 |     return static_cast<const XXXTargetLowering *>(TLI);
 137 |      3 |   }

  Instantiation: llvm::ARMTargetLowering const* llvm::CallLowering::getTLI<llvm::ARMTargetLowering>() const
 135 |  3.22k |     const XXXTargetLowering *getTLI() const {
 136 |  3.22k |     return static_cast<const XXXTargetLowering *>(TLI);
 137 |  3.22k |   }

  Instantiation: llvm::MipsTargetLowering const* llvm::CallLowering::getTLI<llvm::MipsTargetLowering>() const
 135 |  1.80k |     const XXXTargetLowering *getTLI() const {
 136 |  1.80k |     return static_cast<const XXXTargetLowering *>(TLI);
 137 |  1.80k |   }

  Instantiation: llvm::X86TargetLowering const* llvm::CallLowering::getTLI<llvm::X86TargetLowering>() const
 135 |  1.73k |     const XXXTargetLowering *getTLI() const {
 136 |  1.73k |     return static_cast<const XXXTargetLowering *>(TLI);
 137 |  1.73k |   }
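The per-instantiation counts above come from each target's own CallLowering subclass requesting its specific TargetLowering through this getter. A hedged sketch of that usage pattern (MyCallLowering and MyTargetLowering are invented stand-ins, assumed to be fully defined in the target backend; F is the current llvm::Function):

    // Fragment inside a member of a hypothetical MyCallLowering subclass.
    // CCAssignFnForCall is illustrative of the target-specific queries this
    // getter enables (AArch64's TargetLowering has such a helper).
    const MyTargetLowering &TLI = *getTLI<MyTargetLowering>();
    llvm::CCAssignFn *AssignFn =
        TLI.CCAssignFnForCall(F.getCallingConv(), F.isVarArg());

Note that the static_cast in the template is unchecked, so the requested class must match the TargetLowering subclass the target actually installed.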
 138 |        |
 139 |        |   template <typename FuncInfoTy>
 140 |        |   void setArgFlags(ArgInfo &Arg, unsigned OpIdx, const DataLayout &DL,
 141 |        |                    const FuncInfoTy &FuncInfo) const;
 142 |        |
 143 |        |   /// Generate instructions for packing \p SrcRegs into one big register
 144 |        |   /// corresponding to the aggregate type \p PackedTy.
 145 |        |   ///
 146 |        |   /// \param SrcRegs should contain one virtual register for each base type in
 147 |        |   ///                \p PackedTy, as returned by computeValueLLTs.
 148 |        |   ///
 149 |        |   /// \return The packed register.
 150 |        |   Register packRegs(ArrayRef<Register> SrcRegs, Type *PackedTy,
 151 |        |                     MachineIRBuilder &MIRBuilder) const;
 152 |        |
 153 |        |   /// Generate instructions for unpacking \p SrcReg into the \p DstRegs
 154 |        |   /// corresponding to the aggregate type \p PackedTy.
 155 |        |   ///
 156 |        |   /// \param DstRegs should contain one virtual register for each base type in
 157 |        |   ///        \p PackedTy, as returned by computeValueLLTs.
 158 |        |   void unpackRegs(ArrayRef<Register> DstRegs, Register SrcReg, Type *PackedTy,
 159 |        |                   MachineIRBuilder &MIRBuilder) const;
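packRegs and unpackRegs are the aggregate glue used by the generic lowerCall at the bottom of this header: a value whose IR type is an aggregate has its per-element vregs merged into one packed vreg for the parts of the flow that want exactly one register per value, and split again afterwards. A small sketch of the two directions (hypothetical fragment inside a CallLowering subclass member; ElemRegs, NewElemRegs and AggTy are assumed locals):

    // ElemRegs: one vreg per base type of the aggregate AggTy, in
    // computeValueLLTs order. Packed: a single vreg covering the whole value.
    llvm::Register Packed = packRegs(ElemRegs, AggTy, MIRBuilder);

    // The inverse: split a packed vreg back into freshly created per-element
    // destination vregs.
    unpackRegs(NewElemRegs, Packed, AggTy, MIRBuilder);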
 160 |        |
 161 |        |   /// Invoke Handler::assignArg on each of the given \p Args and then use
 162 |        |   /// \p Handler to move them to the assigned locations.
 163 |        |   ///
 164 |        |   /// \return True if everything has succeeded, false otherwise.
 165 |        |   bool handleAssignments(MachineIRBuilder &MIRBuilder, ArrayRef<ArgInfo> Args,
 166 |        |                          ValueHandler &Handler) const;
 167 |        |   bool handleAssignments(CCState &CCState,
 168 |        |                          SmallVectorImpl<CCValAssign> &ArgLocs,
 169 |        |                          MachineIRBuilder &MIRBuilder, ArrayRef<ArgInfo> Args,
 170 |        |                          ValueHandler &Handler) const;
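handleAssignments is what ties the pieces together: it runs the ValueHandler's assignArg (and therefore the CCAssignFn) over every ArgInfo, then calls back into the handler to emit the copies, loads or stores for each decision. A minimal sketch of the calling pattern, reusing the incoming handler sketched after the ValueHandler definition (fragment from a hypothetical target hook; MIRBuilder, MRI, Args and CC_MyTarget are assumed to exist, with Args typically filled in via setArgFlags):

    // Assumed in scope: MIRBuilder, MRI, a SmallVector<ArgInfo, 8> Args
    // describing every value to place, and a CCAssignFn named CC_MyTarget.
    SimpleIncomingHandler Handler(MIRBuilder, MRI, CC_MyTarget);
    if (!handleAssignments(MIRBuilder, Args, Handler))
      return false; // report failure so GlobalISel can fall back
    return true;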
 171 |        | public:
 172 |  46.8k |   CallLowering(const TargetLowering *TLI) : TLI(TLI) {}
 173 |  34.0k |   virtual ~CallLowering() = default;
 174 |        |
 175 |        |   /// \return true if the target is capable of handling swifterror values that
 176 |        |   /// have been promoted to a specified register. The extended versions of
 177 |        |   /// lowerReturn and lowerCall should be implemented.
 178 |  4.74k |   virtual bool supportSwiftError() const {
 179 |  4.74k |     return false;
 180 |  4.74k |   }
 181 |        |
 182 |        |   /// This hook must be implemented to lower outgoing return values, described
 183 |        |   /// by \p Val, into the specified virtual registers \p VRegs.
 184 |        |   /// This hook is used by GlobalISel.
 185 |        |   ///
 186 |        |   /// \p SwiftErrorVReg is non-zero if the function has a swifterror parameter
 187 |        |   /// that needs to be implicitly returned.
 188 |        |   ///
 189 |        |   /// \return True if the lowering succeeds, false otherwise.
 190 |        |   virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
 191 |        |                            ArrayRef<Register> VRegs,
 192 |  1.70k |                            Register SwiftErrorVReg) const {
 193 |  1.70k |     if (!supportSwiftError()) {
 194 |  1.70k |       assert(SwiftErrorVReg == 0 && "attempt to use unsupported swifterror");
 195 |  1.70k |       return lowerReturn(MIRBuilder, Val, VRegs);
 196 |  1.70k |     }
 197 |      0 |     return false;
 198 |      0 |   }
 199 |        |
 200 |        |   /// This hook behaves as the extended lowerReturn function, but for targets
 201 |        |   /// that do not support swifterror value promotion.
 202 |        |   virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
 203 |      0 |                            ArrayRef<Register> VRegs) const {
 204 |      0 |     return false;
 205 |      0 |   }
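A target that does not support swifterror only has to provide the three-argument overload above; the four-argument entry point dispatches to it after asserting that no swifterror vreg was requested. A compressed sketch of such an override (hypothetical backend fragment: MyTarget::RET, RetCC_MyTarget and SimpleOutgoingHandler, an outgoing counterpart of the handler sketched earlier, are all invented names; extension, value splitting and sret returns are omitted):

    bool MyCallLowering::lowerReturn(llvm::MachineIRBuilder &MIRBuilder,
                                     const llvm::Value *Val,
                                     llvm::ArrayRef<llvm::Register> VRegs) const {
      // Build the return instruction up front; assigning the return value adds
      // the chosen physical registers to it as implicit uses.
      auto Ret = MIRBuilder.buildInstrNoInsert(MyTarget::RET);
      bool Success = true;
      if (Val) {
        llvm::MachineFunction &MF = MIRBuilder.getMF();
        llvm::CallLowering::ArgInfo RetInfo(VRegs, Val->getType());
        setArgFlags(RetInfo, llvm::AttributeList::ReturnIndex,
                    MF.getDataLayout(), MF.getFunction());
        SimpleOutgoingHandler Handler(MIRBuilder, MF.getRegInfo(), Ret,
                                      RetCC_MyTarget);
        Success = handleAssignments(MIRBuilder, RetInfo, Handler);
      }
      MIRBuilder.insertInstr(Ret);
      return Success;
    }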
 206 |        |
 207 |        |   /// This hook must be implemented to lower the incoming (formal)
 208 |        |   /// arguments, described by \p VRegs, for GlobalISel. Each argument
 209 |        |   /// must end up in the related virtual registers described by \p VRegs.
 210 |        |   /// In other words, the first argument should end up in \c VRegs[0],
 211 |        |   /// the second in \c VRegs[1], and so on. For each argument, there will be one
 212 |        |   /// register for each non-aggregate type, as returned by \c computeValueLLTs.
 213 |        |   /// \p MIRBuilder is set to the proper insertion point for the argument
 214 |        |   /// lowering.
 215 |        |   ///
 216 |        |   /// \return True if the lowering succeeded, false otherwise.
 217 |        |   virtual bool lowerFormalArguments(MachineIRBuilder &MIRBuilder,
 218 |        |                                     const Function &F,
 219 |      0 |                                     ArrayRef<ArrayRef<Register>> VRegs) const {
 220 |      0 |     return false;
 221 |      0 |   }
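The \p VRegs layout just described (one inner list per IR argument, one register per non-aggregate piece) is what an override has to distribute. A sketch of the usual shape, reusing the incoming handler sketched earlier (hypothetical backend fragment; CC_MyTarget is an invented CCAssignFn, and failure paths and split arguments are elided):

    bool MyCallLowering::lowerFormalArguments(
        llvm::MachineIRBuilder &MIRBuilder, const llvm::Function &F,
        llvm::ArrayRef<llvm::ArrayRef<llvm::Register>> VRegs) const {
      llvm::MachineFunction &MF = MIRBuilder.getMF();
      const llvm::DataLayout &DL = MF.getDataLayout();

      // One ArgInfo per IR argument; VRegs[i] already holds its destination vregs.
      llvm::SmallVector<llvm::CallLowering::ArgInfo, 8> SplitArgs;
      unsigned Idx = 0;
      for (const llvm::Argument &Arg : F.args()) {
        llvm::CallLowering::ArgInfo AInfo(VRegs[Idx], Arg.getType());
        // Derive zext/sext/byval/... flags from the attributes of argument Idx.
        setArgFlags(AInfo, Idx + llvm::AttributeList::FirstArgIndex, DL, F);
        SplitArgs.push_back(AInfo);
        ++Idx;
      }

      SimpleIncomingHandler Handler(MIRBuilder, MF.getRegInfo(), CC_MyTarget);
      return handleAssignments(MIRBuilder, SplitArgs, Handler);
    }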
 222 |        |
 223 |        |   /// This hook must be implemented to lower the given call instruction,
 224 |        |   /// including argument and return value marshalling.
 225 |        |   ///
 226 |        |   /// \p CallConv is the calling convention to be used for the call.
 227 |        |   ///
 228 |        |   /// \p Callee is the destination of the call. It should be either a register,
 229 |        |   /// globaladdress, or externalsymbol.
 230 |        |   ///
 231 |        |   /// \p OrigRet is a descriptor for the return type of the function.
 232 |        |   ///
 233 |        |   /// \p OrigArgs is a list of descriptors of the arguments passed to the
 234 |        |   /// function.
 235 |        |   ///
 236 |        |   /// \p SwiftErrorVReg is non-zero if the call has a swifterror inout
 237 |        |   /// parameter, and contains the vreg that the swifterror should be copied into
 238 |        |   /// after the call.
 239 |        |   ///
 240 |        |   /// \return true if the lowering succeeded, false otherwise.
 241 |        |   virtual bool lowerCall(MachineIRBuilder &MIRBuilder, CallingConv::ID CallConv,
 242 |        |                          const MachineOperand &Callee, const ArgInfo &OrigRet,
 243 |        |                          ArrayRef<ArgInfo> OrigArgs,
 244 |    159 |                          Register SwiftErrorVReg) const {
 245 |    159 |     if (!supportSwiftError()) {
 246 |    159 |       assert(SwiftErrorVReg == 0 && "trying to use unsupported swifterror");
 247 |    159 |       return lowerCall(MIRBuilder, CallConv, Callee, OrigRet, OrigArgs);
 248 |    159 |     }
 249 |      0 |     return false;
 250 |      0 |   }
 251 |        |
 252 |        |   /// This hook behaves as the extended lowerCall function, but for targets that
 253 |        |   /// do not support swifterror value promotion.
 254 |        |   virtual bool lowerCall(MachineIRBuilder &MIRBuilder, CallingConv::ID CallConv,
 255 |        |                          const MachineOperand &Callee, const ArgInfo &OrigRet,
 256 |      0 |                          ArrayRef<ArgInfo> OrigArgs) const {
 257 |      0 |     return false;
 258 |      0 |   }
 259 |        |
 260 |        |   /// Lower the given call instruction, including argument and return value
 261 |        |   /// marshalling.
 262 |        |   ///
 263 |        |   /// \p CS is the call/invoke instruction.
 264 |        |   ///
 265 |        |   /// \p ResRegs are the registers where the call's return value should be
 266 |        |   /// stored (or 0 if there is no return value). There will be one register for
 267 |        |   /// each non-aggregate type, as returned by \c computeValueLLTs.
 268 |        |   ///
 269 |        |   /// \p ArgRegs is a list of lists of virtual registers containing each
 270 |        |   /// argument that needs to be passed (argument \c i should be placed in \c
 271 |        |   /// ArgRegs[i]). For each argument, there will be one register for each
 272 |        |   /// non-aggregate type, as returned by \c computeValueLLTs.
 273 |        |   ///
 274 |        |   /// \p SwiftErrorVReg is non-zero if the call has a swifterror inout
 275 |        |   /// parameter, and contains the vreg that the swifterror should be copied into
 276 |        |   /// after the call.
 277 |        |   ///
 278 |        |   /// \p GetCalleeReg is a callback to materialize a register for the callee if
 279 |        |   /// the target determines it cannot jump to the destination based purely on \p
 280 |        |   /// CS. This might be because \p CS is indirect, or because of the limited
 281 |        |   /// range of an immediate jump.
 282 |        |   ///
 283 |        |   /// \return true if the lowering succeeded, false otherwise.
 284 |        |   bool lowerCall(MachineIRBuilder &MIRBuilder, ImmutableCallSite CS,
 285 |        |                  ArrayRef<Register> ResRegs,
 286 |        |                  ArrayRef<ArrayRef<Register>> ArgRegs, Register SwiftErrorVReg,
 287 |        |                  std::function<unsigned()> GetCalleeReg) const;
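This CallSite-based overload is the entry point the IRTranslator drives, and it is where \p GetCalleeReg comes in: the callback is only invoked if the target's lowerCall decides the callee cannot be reached without first materializing it in a register. A hedged sketch of a call site (the surrounding translator state and the vreg lists are assumed to exist; getOrCreateVReg is a hypothetical stand-in for however the caller maps IR values to vregs):

    // Assumed in scope: CallLowering *CLI, MachineIRBuilder MIRBuilder,
    // ImmutableCallSite CS, plus ResRegs, ArgRegs and SwiftErrorVReg built by
    // the caller.
    bool Lowered = CLI->lowerCall(
        MIRBuilder, CS, ResRegs, ArgRegs, SwiftErrorVReg,
        [&]() { return getOrCreateVReg(*CS.getCalledValue()); });
    if (!Lowered)
      return false; // signal fallback to SelectionDAG/FastISel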
 288 |        | };
 289 |        |
 290 |        | } // end namespace llvm
 291 |        |
 292 |        | #endif // LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H