Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/CodeGen/GlobalISel/MachineIRBuilder.cpp
Line
Count
Source (jump to first uncovered line)
1
//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
/// \file
9
/// This file implements the MachineIRBuidler class.
10
//===----------------------------------------------------------------------===//
11
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
12
#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"
13
14
#include "llvm/CodeGen/MachineFunction.h"
15
#include "llvm/CodeGen/MachineInstr.h"
16
#include "llvm/CodeGen/MachineInstrBuilder.h"
17
#include "llvm/CodeGen/MachineRegisterInfo.h"
18
#include "llvm/CodeGen/TargetInstrInfo.h"
19
#include "llvm/CodeGen/TargetLowering.h"
20
#include "llvm/CodeGen/TargetOpcodes.h"
21
#include "llvm/CodeGen/TargetSubtargetInfo.h"
22
#include "llvm/IR/DebugInfo.h"
23
24
using namespace llvm;
25
26
6.16M
void MachineIRBuilder::setMF(MachineFunction &MF) {
27
6.16M
  State.MF = &MF;
28
6.16M
  State.MBB = nullptr;
29
6.16M
  State.MRI = &MF.getRegInfo();
30
6.16M
  State.TII = MF.getSubtarget().getInstrInfo();
31
6.16M
  State.DL = DebugLoc();
32
6.16M
  State.II = MachineBasicBlock::iterator();
33
6.16M
  State.Observer = nullptr;
34
6.16M
}
35
36
10.7M
void MachineIRBuilder::setMBB(MachineBasicBlock &MBB) {
37
10.7M
  State.MBB = &MBB;
38
10.7M
  State.II = MBB.end();
39
10.7M
  assert(&getMF() == MBB.getParent() &&
40
10.7M
         "Basic block is in a different function");
41
10.7M
}
42
43
6.61M
void MachineIRBuilder::setInstr(MachineInstr &MI) {
44
6.61M
  assert(MI.getParent() && "Instruction is not part of a basic block");
45
6.61M
  setMBB(*MI.getParent());
46
6.61M
  State.II = MI.getIterator();
47
6.61M
}
48
49
715k
void MachineIRBuilder::setCSEInfo(GISelCSEInfo *Info) { State.CSEInfo = Info; }
50
51
void MachineIRBuilder::setInsertPt(MachineBasicBlock &MBB,
52
1.26M
                                   MachineBasicBlock::iterator II) {
53
1.26M
  assert(MBB.getParent() == &getMF() &&
54
1.26M
         "Basic block is in a different function");
55
1.26M
  State.MBB = &MBB;
56
1.26M
  State.II = II;
57
1.26M
}
58
59
25.7M
void MachineIRBuilder::recordInsertion(MachineInstr *InsertedInstr) const {
60
25.7M
  if (State.Observer)
61
2.56M
    State.Observer->createdInstr(*InsertedInstr);
62
25.7M
}
63
64
240k
void MachineIRBuilder::setChangeObserver(GISelChangeObserver &Observer) {
65
240k
  State.Observer = &Observer;
66
240k
}
67
68
4.72k
void MachineIRBuilder::stopObservingChanges() { State.Observer = nullptr; }
69
70
//------------------------------------------------------------------------------
71
// Build instruction variants.
72
//------------------------------------------------------------------------------
73
74
24.1M
MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) {
75
24.1M
  return insertInstr(buildInstrNoInsert(Opcode));
76
24.1M
}
77
78
25.7M
MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
79
25.7M
  MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
80
25.7M
  return MIB;
81
25.7M
}
82
83
25.7M
MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
84
25.7M
  getMBB().insert(getInsertPt(), MIB);
85
25.7M
  recordInsertion(MIB);
86
25.7M
  return MIB;
87
25.7M
}
88
89
MachineInstrBuilder
90
MachineIRBuilder::buildDirectDbgValue(Register Reg, const MDNode *Variable,
91
5
                                      const MDNode *Expr) {
92
5
  assert(isa<DILocalVariable>(Variable) && "not a variable");
93
5
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
94
5
  assert(
95
5
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
96
5
      "Expected inlined-at fields to agree");
97
5
  return insertInstr(BuildMI(getMF(), getDL(),
98
5
                             getTII().get(TargetOpcode::DBG_VALUE),
99
5
                             /*IsIndirect*/ false, Reg, Variable, Expr));
100
5
}
101
102
MachineInstrBuilder
103
MachineIRBuilder::buildIndirectDbgValue(Register Reg, const MDNode *Variable,
104
2
                                        const MDNode *Expr) {
105
2
  assert(isa<DILocalVariable>(Variable) && "not a variable");
106
2
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
107
2
  assert(
108
2
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
109
2
      "Expected inlined-at fields to agree");
110
2
  return insertInstr(BuildMI(getMF(), getDL(),
111
2
                             getTII().get(TargetOpcode::DBG_VALUE),
112
2
                             /*IsIndirect*/ true, Reg, Variable, Expr));
113
2
}
114
115
MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
116
                                                      const MDNode *Variable,
117
0
                                                      const MDNode *Expr) {
118
0
  assert(isa<DILocalVariable>(Variable) && "not a variable");
119
0
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
120
0
  assert(
121
0
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
122
0
      "Expected inlined-at fields to agree");
123
0
  return buildInstr(TargetOpcode::DBG_VALUE)
124
0
      .addFrameIndex(FI)
125
0
      .addImm(0)
126
0
      .addMetadata(Variable)
127
0
      .addMetadata(Expr);
128
0
}
129
130
MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
131
                                                         const MDNode *Variable,
132
5
                                                         const MDNode *Expr) {
133
5
  assert(isa<DILocalVariable>(Variable) && "not a variable");
134
5
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
135
5
  assert(
136
5
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
137
5
      "Expected inlined-at fields to agree");
138
5
  auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
139
5
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
140
3
    if (CI->getBitWidth() > 64)
141
0
      MIB.addCImm(CI);
142
3
    else
143
3
      MIB.addImm(CI->getZExtValue());
144
3
  } else 
if (auto *2
CFP2
= dyn_cast<ConstantFP>(&C)) {
145
1
    MIB.addFPImm(CFP);
146
1
  } else {
147
1
    // Insert %noreg if we didn't find a usable constant and had to drop it.
148
1
    MIB.addReg(0U);
149
1
  }
150
5
151
5
  return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
152
5
}
153
154
0
MachineInstrBuilder MachineIRBuilder::buildDbgLabel(const MDNode *Label) {
155
0
  assert(isa<DILabel>(Label) && "not a label");
156
0
  assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
157
0
         "Expected inlined-at fields to agree");
158
0
  auto MIB = buildInstr(TargetOpcode::DBG_LABEL);
159
0
160
0
  return MIB.addMetadata(Label);
161
0
}
162
163
MachineInstrBuilder MachineIRBuilder::buildFrameIndex(const DstOp &Res,
164
65.7k
                                                      int Idx) {
165
65.7k
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
166
65.7k
  auto MIB = buildInstr(TargetOpcode::G_FRAME_INDEX);
167
65.7k
  Res.addDefToMIB(*getMRI(), MIB);
168
65.7k
  MIB.addFrameIndex(Idx);
169
65.7k
  return MIB;
170
65.7k
}
171
172
MachineInstrBuilder MachineIRBuilder::buildGlobalValue(const DstOp &Res,
173
498k
                                                       const GlobalValue *GV) {
174
498k
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
175
498k
  assert(Res.getLLTTy(*getMRI()).getAddressSpace() ==
176
498k
             GV->getType()->getAddressSpace() &&
177
498k
         "address space mismatch");
178
498k
179
498k
  auto MIB = buildInstr(TargetOpcode::G_GLOBAL_VALUE);
180
498k
  Res.addDefToMIB(*getMRI(), MIB);
181
498k
  MIB.addGlobalAddress(GV);
182
498k
  return MIB;
183
498k
}
184
185
MachineInstrBuilder MachineIRBuilder::buildJumpTable(const LLT PtrTy,
186
2.27k
                                                     unsigned JTI) {
187
2.27k
  return buildInstr(TargetOpcode::G_JUMP_TABLE, {PtrTy}, {})
188
2.27k
      .addJumpTableIndex(JTI);
189
2.27k
}
190
191
void MachineIRBuilder::validateBinaryOp(const LLT &Res, const LLT &Op0,
192
923k
                                        const LLT &Op1) {
193
923k
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
194
923k
  assert((Res == Op0 && Res == Op1) && "type mismatch");
195
923k
}
196
197
void MachineIRBuilder::validateShiftOp(const LLT &Res, const LLT &Op0,
198
148k
                                       const LLT &Op1) {
199
148k
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
200
148k
  assert((Res == Op0) && "type mismatch");
201
148k
}
202
203
MachineInstrBuilder MachineIRBuilder::buildGEP(const DstOp &Res,
204
                                               const SrcOp &Op0,
205
1.85M
                                               const SrcOp &Op1) {
206
1.85M
  assert(Res.getLLTTy(*getMRI()).isPointer() &&
207
1.85M
         Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
208
1.85M
  assert(Op1.getLLTTy(*getMRI()).isScalar() && "invalid offset type");
209
1.85M
210
1.85M
  auto MIB = buildInstr(TargetOpcode::G_GEP);
211
1.85M
  Res.addDefToMIB(*getMRI(), MIB);
212
1.85M
  Op0.addSrcToMIB(MIB);
213
1.85M
  Op1.addSrcToMIB(MIB);
214
1.85M
  return MIB;
215
1.85M
}
216
217
Optional<MachineInstrBuilder>
218
MachineIRBuilder::materializeGEP(Register &Res, Register Op0,
219
2.11M
                                 const LLT &ValueTy, uint64_t Value) {
220
2.11M
  assert(Res == 0 && "Res is a result argument");
221
2.11M
  assert(ValueTy.isScalar()  && "invalid offset type");
222
2.11M
223
2.11M
  if (Value == 0) {
224
2.11M
    Res = Op0;
225
2.11M
    return None;
226
2.11M
  }
227
1.50k
228
1.50k
  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
229
1.50k
  auto Cst = buildConstant(ValueTy, Value);
230
1.50k
  return buildGEP(Res, Op0, Cst.getReg(0));
231
1.50k
}
232
233
MachineInstrBuilder MachineIRBuilder::buildPtrMask(const DstOp &Res,
234
                                                   const SrcOp &Op0,
235
98
                                                   uint32_t NumBits) {
236
98
  assert(Res.getLLTTy(*getMRI()).isPointer() &&
237
98
         Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
238
98
239
98
  auto MIB = buildInstr(TargetOpcode::G_PTR_MASK);
240
98
  Res.addDefToMIB(*getMRI(), MIB);
241
98
  Op0.addSrcToMIB(MIB);
242
98
  MIB.addImm(NumBits);
243
98
  return MIB;
244
98
}
245
246
784k
MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
247
784k
  return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
248
784k
}
249
250
5
MachineInstrBuilder MachineIRBuilder::buildBrIndirect(Register Tgt) {
251
5
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
252
5
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
253
5
}
254
255
MachineInstrBuilder MachineIRBuilder::buildBrJT(Register TablePtr,
256
                                                unsigned JTI,
257
2.27k
                                                Register IndexReg) {
258
2.27k
  assert(getMRI()->getType(TablePtr).isPointer() &&
259
2.27k
         "Table reg must be a pointer");
260
2.27k
  return buildInstr(TargetOpcode::G_BRJT)
261
2.27k
      .addUse(TablePtr)
262
2.27k
      .addJumpTableIndex(JTI)
263
2.27k
      .addUse(IndexReg);
264
2.27k
}
265
266
MachineInstrBuilder MachineIRBuilder::buildCopy(const DstOp &Res,
267
5.23M
                                                const SrcOp &Op) {
268
5.23M
  return buildInstr(TargetOpcode::COPY, Res, Op);
269
5.23M
}
270
271
MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
272
2.78M
                                                    const ConstantInt &Val) {
273
2.78M
  LLT Ty = Res.getLLTTy(*getMRI());
274
2.78M
  LLT EltTy = Ty.getScalarType();
275
2.78M
  assert(EltTy.getScalarSizeInBits() == Val.getBitWidth() &&
276
2.78M
         "creating constant with the wrong size");
277
2.78M
278
2.78M
  if (Ty.isVector()) {
279
1
    auto Const = buildInstr(TargetOpcode::G_CONSTANT)
280
1
    .addDef(getMRI()->createGenericVirtualRegister(EltTy))
281
1
    .addCImm(&Val);
282
1
    return buildSplatVector(Res, Const);
283
1
  }
284
2.78M
285
2.78M
  auto Const = buildInstr(TargetOpcode::G_CONSTANT);
286
2.78M
  Res.addDefToMIB(*getMRI(), Const);
287
2.78M
  Const.addCImm(&Val);
288
2.78M
  return Const;
289
2.78M
}
290
291
MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
292
1.83M
                                                    int64_t Val) {
293
1.83M
  auto IntN = IntegerType::get(getMF().getFunction().getContext(),
294
1.83M
                               Res.getLLTTy(*getMRI()).getScalarSizeInBits());
295
1.83M
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
296
1.83M
  return buildConstant(Res, *CI);
297
1.83M
}
298
299
MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
300
28.4k
                                                     const ConstantFP &Val) {
301
28.4k
  LLT Ty = Res.getLLTTy(*getMRI());
302
28.4k
  LLT EltTy = Ty.getScalarType();
303
28.4k
304
28.4k
  assert(APFloat::getSizeInBits(Val.getValueAPF().getSemantics())
305
28.4k
         == EltTy.getSizeInBits() &&
306
28.4k
         "creating fconstant with the wrong size");
307
28.4k
308
28.4k
  assert(!Ty.isPointer() && "invalid operand type");
309
28.4k
310
28.4k
  if (Ty.isVector()) {
311
1
    auto Const = buildInstr(TargetOpcode::G_FCONSTANT)
312
1
    .addDef(getMRI()->createGenericVirtualRegister(EltTy))
313
1
    .addFPImm(&Val);
314
1
315
1
    return buildSplatVector(Res, Const);
316
1
  }
317
28.4k
318
28.4k
  auto Const = buildInstr(TargetOpcode::G_FCONSTANT);
319
28.4k
  Res.addDefToMIB(*getMRI(), Const);
320
28.4k
  Const.addFPImm(&Val);
321
28.4k
  return Const;
322
28.4k
}
323
324
MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
325
221k
                                                    const APInt &Val) {
326
221k
  ConstantInt *CI = ConstantInt::get(getMF().getFunction().getContext(), Val);
327
221k
  return buildConstant(Res, *CI);
328
221k
}
329
330
MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
331
14
                                                     double Val) {
332
14
  LLT DstTy = Res.getLLTTy(*getMRI());
333
14
  auto &Ctx = getMF().getFunction().getContext();
334
14
  auto *CFP =
335
14
      ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getScalarSizeInBits()));
336
14
  return buildFConstant(Res, *CFP);
337
14
}
338
339
MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
340
7
                                                     const APFloat &Val) {
341
7
  auto &Ctx = getMF().getFunction().getContext();
342
7
  auto *CFP = ConstantFP::get(Ctx, Val);
343
7
  return buildFConstant(Res, *CFP);
344
7
}
345
346
MachineInstrBuilder MachineIRBuilder::buildBrCond(Register Tst,
347
967k
                                                  MachineBasicBlock &Dest) {
348
967k
  assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");
349
967k
350
967k
  return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
351
967k
}
352
353
MachineInstrBuilder MachineIRBuilder::buildLoad(const DstOp &Res,
354
                                                const SrcOp &Addr,
355
1.25M
                                                MachineMemOperand &MMO) {
356
1.25M
  return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO);
357
1.25M
}
358
359
MachineInstrBuilder MachineIRBuilder::buildLoadInstr(unsigned Opcode,
360
                                                     const DstOp &Res,
361
                                                     const SrcOp &Addr,
362
1.25M
                                                     MachineMemOperand &MMO) {
363
1.25M
  assert(Res.getLLTTy(*getMRI()).isValid() && "invalid operand type");
364
1.25M
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
365
1.25M
366
1.25M
  auto MIB = buildInstr(Opcode);
367
1.25M
  Res.addDefToMIB(*getMRI(), MIB);
368
1.25M
  Addr.addSrcToMIB(MIB);
369
1.25M
  MIB.addMemOperand(&MMO);
370
1.25M
  return MIB;
371
1.25M
}
372
373
MachineInstrBuilder MachineIRBuilder::buildStore(const SrcOp &Val,
374
                                                 const SrcOp &Addr,
375
953k
                                                 MachineMemOperand &MMO) {
376
953k
  assert(Val.getLLTTy(*getMRI()).isValid() && "invalid operand type");
377
953k
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
378
953k
379
953k
  auto MIB = buildInstr(TargetOpcode::G_STORE);
380
953k
  Val.addSrcToMIB(MIB);
381
953k
  Addr.addSrcToMIB(MIB);
382
953k
  MIB.addMemOperand(&MMO);
383
953k
  return MIB;
384
953k
}
385
386
MachineInstrBuilder MachineIRBuilder::buildUAddo(const DstOp &Res,
387
                                                 const DstOp &CarryOut,
388
                                                 const SrcOp &Op0,
389
19
                                                 const SrcOp &Op1) {
390
19
  return buildInstr(TargetOpcode::G_UADDO, {Res, CarryOut}, {Op0, Op1});
391
19
}
392
393
MachineInstrBuilder MachineIRBuilder::buildUAdde(const DstOp &Res,
394
                                                 const DstOp &CarryOut,
395
                                                 const SrcOp &Op0,
396
                                                 const SrcOp &Op1,
397
57
                                                 const SrcOp &CarryIn) {
398
57
  return buildInstr(TargetOpcode::G_UADDE, {Res, CarryOut},
399
57
                    {Op0, Op1, CarryIn});
400
57
}
401
402
MachineInstrBuilder MachineIRBuilder::buildAnyExt(const DstOp &Res,
403
42.2k
                                                  const SrcOp &Op) {
404
42.2k
  return buildInstr(TargetOpcode::G_ANYEXT, Res, Op);
405
42.2k
}
406
407
MachineInstrBuilder MachineIRBuilder::buildSExt(const DstOp &Res,
408
4.05k
                                                const SrcOp &Op) {
409
4.05k
  return buildInstr(TargetOpcode::G_SEXT, Res, Op);
410
4.05k
}
411
412
MachineInstrBuilder MachineIRBuilder::buildZExt(const DstOp &Res,
413
39.7k
                                                const SrcOp &Op) {
414
39.7k
  return buildInstr(TargetOpcode::G_ZEXT, Res, Op);
415
39.7k
}
416
417
124
unsigned MachineIRBuilder::getBoolExtOp(bool IsVec, bool IsFP) const {
418
124
  const auto *TLI = getMF().getSubtarget().getTargetLowering();
419
124
  switch (TLI->getBooleanContents(IsVec, IsFP)) {
420
124
  case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
421
0
    return TargetOpcode::G_SEXT;
422
124
  case TargetLoweringBase::ZeroOrOneBooleanContent:
423
124
    return TargetOpcode::G_ZEXT;
424
124
  default:
425
0
    return TargetOpcode::G_ANYEXT;
426
124
  }
427
124
}
428
429
MachineInstrBuilder MachineIRBuilder::buildBoolExt(const DstOp &Res,
430
                                                   const SrcOp &Op,
431
0
                                                   bool IsFP) {
432
0
  unsigned ExtOp = getBoolExtOp(getMRI()->getType(Op.getReg()).isVector(), IsFP);
433
0
  return buildInstr(ExtOp, Res, Op);
434
0
}
435
436
MachineInstrBuilder MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc,
437
                                                      const DstOp &Res,
438
258k
                                                      const SrcOp &Op) {
439
258k
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
440
258k
          TargetOpcode::G_SEXT == ExtOpc) &&
441
258k
         "Expecting Extending Opc");
442
258k
  assert(Res.getLLTTy(*getMRI()).isScalar() ||
443
258k
         Res.getLLTTy(*getMRI()).isVector());
444
258k
  assert(Res.getLLTTy(*getMRI()).isScalar() ==
445
258k
         Op.getLLTTy(*getMRI()).isScalar());
446
258k
447
258k
  unsigned Opcode = TargetOpcode::COPY;
448
258k
  if (Res.getLLTTy(*getMRI()).getSizeInBits() >
449
258k
      Op.getLLTTy(*getMRI()).getSizeInBits())
450
51.0k
    Opcode = ExtOpc;
451
207k
  else if (Res.getLLTTy(*getMRI()).getSizeInBits() <
452
207k
           Op.getLLTTy(*getMRI()).getSizeInBits())
453
21.8k
    Opcode = TargetOpcode::G_TRUNC;
454
207k
  else
455
207k
    assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));
456
258k
457
258k
  return buildInstr(Opcode, Res, Op);
458
258k
}
459
460
MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(const DstOp &Res,
461
16
                                                       const SrcOp &Op) {
462
16
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
463
16
}
464
465
MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(const DstOp &Res,
466
4.58k
                                                       const SrcOp &Op) {
467
4.58k
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
468
4.58k
}
469
470
MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(const DstOp &Res,
471
253k
                                                         const SrcOp &Op) {
472
253k
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
473
253k
}
474
475
MachineInstrBuilder MachineIRBuilder::buildCast(const DstOp &Dst,
476
128k
                                                const SrcOp &Src) {
477
128k
  LLT SrcTy = Src.getLLTTy(*getMRI());
478
128k
  LLT DstTy = Dst.getLLTTy(*getMRI());
479
128k
  if (SrcTy == DstTy)
480
17
    return buildCopy(Dst, Src);
481
128k
482
128k
  unsigned Opcode;
483
128k
  if (SrcTy.isPointer() && 
DstTy.isScalar()14.0k
)
484
14.0k
    Opcode = TargetOpcode::G_PTRTOINT;
485
114k
  else if (DstTy.isPointer() && 
SrcTy.isScalar()114k
)
486
114k
    Opcode = TargetOpcode::G_INTTOPTR;
487
1
  else {
488
1
    assert(!SrcTy.isPointer() && !DstTy.isPointer() && "n G_ADDRCAST yet");
489
1
    Opcode = TargetOpcode::G_BITCAST;
490
1
  }
491
128k
492
128k
  return buildInstr(Opcode, Dst, Src);
493
128k
}
494
495
MachineInstrBuilder MachineIRBuilder::buildExtract(const DstOp &Dst,
496
                                                   const SrcOp &Src,
497
398
                                                   uint64_t Index) {
498
398
  LLT SrcTy = Src.getLLTTy(*getMRI());
499
398
  LLT DstTy = Dst.getLLTTy(*getMRI());
500
398
501
#ifndef NDEBUG
502
  assert(SrcTy.isValid() && "invalid operand type");
503
  assert(DstTy.isValid() && "invalid operand type");
504
  assert(Index + DstTy.getSizeInBits() <= SrcTy.getSizeInBits() &&
505
         "extracting off end of register");
506
#endif
507
508
398
  if (DstTy.getSizeInBits() == SrcTy.getSizeInBits()) {
509
17
    assert(Index == 0 && "insertion past the end of a register");
510
17
    return buildCast(Dst, Src);
511
17
  }
512
381
513
381
  auto Extract = buildInstr(TargetOpcode::G_EXTRACT);
514
381
  Dst.addDefToMIB(*getMRI(), Extract);
515
381
  Src.addSrcToMIB(Extract);
516
381
  Extract.addImm(Index);
517
381
  return Extract;
518
381
}
519
520
void MachineIRBuilder::buildSequence(Register Res, ArrayRef<Register> Ops,
521
0
                                     ArrayRef<uint64_t> Indices) {
522
#ifndef NDEBUG
523
  assert(Ops.size() == Indices.size() && "incompatible args");
524
  assert(!Ops.empty() && "invalid trivial sequence");
525
  assert(std::is_sorted(Indices.begin(), Indices.end()) &&
526
         "sequence offsets must be in ascending order");
527
528
  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
529
  for (auto Op : Ops)
530
    assert(getMRI()->getType(Op).isValid() && "invalid operand type");
531
#endif
532
533
0
  LLT ResTy = getMRI()->getType(Res);
534
0
  LLT OpTy = getMRI()->getType(Ops[0]);
535
0
  unsigned OpSize = OpTy.getSizeInBits();
536
0
  bool MaybeMerge = true;
537
0
  for (unsigned i = 0; i < Ops.size(); ++i) {
538
0
    if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
539
0
      MaybeMerge = false;
540
0
      break;
541
0
    }
542
0
  }
543
0
544
0
  if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
545
0
    buildMerge(Res, Ops);
546
0
    return;
547
0
  }
548
0
549
0
  Register ResIn = getMRI()->createGenericVirtualRegister(ResTy);
550
0
  buildUndef(ResIn);
551
0
552
0
  for (unsigned i = 0; i < Ops.size(); ++i) {
553
0
    Register ResOut = i + 1 == Ops.size()
554
0
                          ? Res
555
0
                          : getMRI()->createGenericVirtualRegister(ResTy);
556
0
    buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
557
0
    ResIn = ResOut;
558
0
  }
559
0
}
560
561
20.2k
MachineInstrBuilder MachineIRBuilder::buildUndef(const DstOp &Res) {
562
20.2k
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
563
20.2k
}
564
565
MachineInstrBuilder MachineIRBuilder::buildMerge(const DstOp &Res,
566
751
                                                 ArrayRef<Register> Ops) {
567
751
  // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<SrcOp>,
568
751
  // we need some temporary storage for the DstOp objects. Here we use a
569
751
  // sufficiently large SmallVector to not go through the heap.
570
751
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
571
751
  assert(TmpVec.size() > 1);
572
751
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
573
751
}
574
575
MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<LLT> Res,
576
12
                                                   const SrcOp &Op) {
577
12
  // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<DstOp>,
578
12
  // we need some temporary storage for the DstOp objects. Here we use a
579
12
  // sufficiently large SmallVector to not go through the heap.
580
12
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
581
12
  assert(TmpVec.size() > 1);
582
12
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
583
12
}
584
585
MachineInstrBuilder MachineIRBuilder::buildUnmerge(LLT Res,
586
18
                                                   const SrcOp &Op) {
587
18
  unsigned NumReg = Op.getLLTTy(*getMRI()).getSizeInBits() / Res.getSizeInBits();
588
18
  SmallVector<Register, 8> TmpVec;
589
116
  for (unsigned I = 0; I != NumReg; 
++I98
)
590
98
    TmpVec.push_back(getMRI()->createGenericVirtualRegister(Res));
591
18
  return buildUnmerge(TmpVec, Op);
592
18
}
593
594
MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<Register> Res,
595
4.50k
                                                   const SrcOp &Op) {
596
4.50k
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<DstOp>,
597
4.50k
  // we need some temporary storage for the DstOp objects. Here we use a
598
4.50k
  // sufficiently large SmallVector to not go through the heap.
599
4.50k
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
600
4.50k
  assert(TmpVec.size() > 1);
601
4.50k
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
602
4.50k
}
603
604
MachineInstrBuilder MachineIRBuilder::buildBuildVector(const DstOp &Res,
605
49.7k
                                                       ArrayRef<Register> Ops) {
606
49.7k
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
607
49.7k
  // we need some temporary storage for the DstOp objects. Here we use a
608
49.7k
  // sufficiently large SmallVector to not go through the heap.
609
49.7k
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
610
49.7k
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
611
49.7k
}
612
613
MachineInstrBuilder MachineIRBuilder::buildSplatVector(const DstOp &Res,
614
126
                                                       const SrcOp &Src) {
615
126
  SmallVector<SrcOp, 8> TmpVec(Res.getLLTTy(*getMRI()).getNumElements(), Src);
616
126
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
617
126
}
618
619
MachineInstrBuilder
620
MachineIRBuilder::buildBuildVectorTrunc(const DstOp &Res,
621
0
                                        ArrayRef<Register> Ops) {
622
0
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
623
0
  // we need some temporary storage for the DstOp objects. Here we use a
624
0
  // sufficiently large SmallVector to not go through the heap.
625
0
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
626
0
  return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
627
0
}
628
629
MachineInstrBuilder
630
309
MachineIRBuilder::buildConcatVectors(const DstOp &Res, ArrayRef<Register> Ops) {
631
309
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
632
309
  // we need some temporary storage for the DstOp objects. Here we use a
633
309
  // sufficiently large SmallVector to not go through the heap.
634
309
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
635
309
  return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
636
309
}
637
638
MachineInstrBuilder MachineIRBuilder::buildInsert(Register Res, Register Src,
639
213
                                                  Register Op, unsigned Index) {
640
213
  assert(Index + getMRI()->getType(Op).getSizeInBits() <=
641
213
             getMRI()->getType(Res).getSizeInBits() &&
642
213
         "insertion past the end of a register");
643
213
644
213
  if (getMRI()->getType(Res).getSizeInBits() ==
645
213
      getMRI()->getType(Op).getSizeInBits()) {
646
0
    return buildCast(Res, Op);
647
0
  }
648
213
649
213
  return buildInstr(TargetOpcode::G_INSERT)
650
213
      .addDef(Res)
651
213
      .addUse(Src)
652
213
      .addUse(Op)
653
213
      .addImm(Index);
654
213
}
655
656
MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
657
                                                     ArrayRef<Register> ResultRegs,
658
102k
                                                     bool HasSideEffects) {
659
102k
  auto MIB =
660
102k
      buildInstr(HasSideEffects ? 
TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS98.1k
661
102k
                                : 
TargetOpcode::G_INTRINSIC4.34k
);
662
102k
  for (unsigned ResultReg : ResultRegs)
663
51.0k
    MIB.addDef(ResultReg);
664
102k
  MIB.addIntrinsicID(ID);
665
102k
  return MIB;
666
102k
}
667
668
MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
669
                                                     ArrayRef<DstOp> Results,
670
9
                                                     bool HasSideEffects) {
671
9
  auto MIB =
672
9
      buildInstr(HasSideEffects ? 
TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS0
673
9
                                : TargetOpcode::G_INTRINSIC);
674
9
  for (DstOp Result : Results)
675
9
    Result.addDefToMIB(*getMRI(), MIB);
676
9
  MIB.addIntrinsicID(ID);
677
9
  return MIB;
678
9
}
679
680
MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
681
60.2k
                                                 const SrcOp &Op) {
682
60.2k
  return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
683
60.2k
}
684
685
MachineInstrBuilder MachineIRBuilder::buildFPTrunc(const DstOp &Res,
686
1
                                                   const SrcOp &Op) {
687
1
  return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op);
688
1
}
689
690
MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
691
                                                const DstOp &Res,
692
                                                const SrcOp &Op0,
693
1.07M
                                                const SrcOp &Op1) {
694
1.07M
  return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1});
695
1.07M
}
696
697
MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
698
                                                const DstOp &Res,
699
                                                const SrcOp &Op0,
700
48
                                                const SrcOp &Op1) {
701
48
702
48
  return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1});
703
48
}
704
705
MachineInstrBuilder MachineIRBuilder::buildSelect(const DstOp &Res,
706
                                                  const SrcOp &Tst,
707
                                                  const SrcOp &Op0,
708
2.85k
                                                  const SrcOp &Op1) {
709
2.85k
710
2.85k
  return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1});
711
2.85k
}
712
713
MachineInstrBuilder
714
MachineIRBuilder::buildInsertVectorElement(const DstOp &Res, const SrcOp &Val,
715
13.1k
                                           const SrcOp &Elt, const SrcOp &Idx) {
716
13.1k
  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
717
13.1k
}
718
719
MachineInstrBuilder
720
MachineIRBuilder::buildExtractVectorElement(const DstOp &Res, const SrcOp &Val,
721
5.19k
                                            const SrcOp &Idx) {
722
5.19k
  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
723
5.19k
}
724
725
MachineInstrBuilder MachineIRBuilder::buildAtomicCmpXchgWithSuccess(
726
    Register OldValRes, Register SuccessRes, Register Addr, Register CmpVal,
727
3
    Register NewVal, MachineMemOperand &MMO) {
728
#ifndef NDEBUG
729
  LLT OldValResTy = getMRI()->getType(OldValRes);
730
  LLT SuccessResTy = getMRI()->getType(SuccessRes);
731
  LLT AddrTy = getMRI()->getType(Addr);
732
  LLT CmpValTy = getMRI()->getType(CmpVal);
733
  LLT NewValTy = getMRI()->getType(NewVal);
734
  assert(OldValResTy.isScalar() && "invalid operand type");
735
  assert(SuccessResTy.isScalar() && "invalid operand type");
736
  assert(AddrTy.isPointer() && "invalid operand type");
737
  assert(CmpValTy.isValid() && "invalid operand type");
738
  assert(NewValTy.isValid() && "invalid operand type");
739
  assert(OldValResTy == CmpValTy && "type mismatch");
740
  assert(OldValResTy == NewValTy && "type mismatch");
741
#endif
742
743
3
  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
744
3
      .addDef(OldValRes)
745
3
      .addDef(SuccessRes)
746
3
      .addUse(Addr)
747
3
      .addUse(CmpVal)
748
3
      .addUse(NewVal)
749
3
      .addMemOperand(&MMO);
750
3
}
751
752
MachineInstrBuilder
753
MachineIRBuilder::buildAtomicCmpXchg(Register OldValRes, Register Addr,
754
                                     Register CmpVal, Register NewVal,
755
2
                                     MachineMemOperand &MMO) {
756
#ifndef NDEBUG
757
  LLT OldValResTy = getMRI()->getType(OldValRes);
758
  LLT AddrTy = getMRI()->getType(Addr);
759
  LLT CmpValTy = getMRI()->getType(CmpVal);
760
  LLT NewValTy = getMRI()->getType(NewVal);
761
  assert(OldValResTy.isScalar() && "invalid operand type");
762
  assert(AddrTy.isPointer() && "invalid operand type");
763
  assert(CmpValTy.isValid() && "invalid operand type");
764
  assert(NewValTy.isValid() && "invalid operand type");
765
  assert(OldValResTy == CmpValTy && "type mismatch");
766
  assert(OldValResTy == NewValTy && "type mismatch");
767
#endif
768
769
2
  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
770
2
      .addDef(OldValRes)
771
2
      .addUse(Addr)
772
2
      .addUse(CmpVal)
773
2
      .addUse(NewVal)
774
2
      .addMemOperand(&MMO);
775
2
}
776
777
MachineInstrBuilder MachineIRBuilder::buildAtomicRMW(unsigned Opcode,
778
                                                     Register OldValRes,
779
                                                     Register Addr,
780
                                                     Register Val,
781
22
                                                     MachineMemOperand &MMO) {
782
#ifndef NDEBUG
783
  LLT OldValResTy = getMRI()->getType(OldValRes);
784
  LLT AddrTy = getMRI()->getType(Addr);
785
  LLT ValTy = getMRI()->getType(Val);
786
  assert(OldValResTy.isScalar() && "invalid operand type");
787
  assert(AddrTy.isPointer() && "invalid operand type");
788
  assert(ValTy.isValid() && "invalid operand type");
789
  assert(OldValResTy == ValTy && "type mismatch");
790
#endif
791
792
22
  return buildInstr(Opcode)
793
22
      .addDef(OldValRes)
794
22
      .addUse(Addr)
795
22
      .addUse(Val)
796
22
      .addMemOperand(&MMO);
797
22
}
798
799
MachineInstrBuilder
800
MachineIRBuilder::buildAtomicRMWXchg(Register OldValRes, Register Addr,
801
0
                                     Register Val, MachineMemOperand &MMO) {
802
0
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
803
0
                        MMO);
804
0
}
805
MachineInstrBuilder
806
MachineIRBuilder::buildAtomicRMWAdd(Register OldValRes, Register Addr,
807
0
                                    Register Val, MachineMemOperand &MMO) {
808
0
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
809
0
                        MMO);
810
0
}
811
MachineInstrBuilder
812
MachineIRBuilder::buildAtomicRMWSub(Register OldValRes, Register Addr,
813
0
                                    Register Val, MachineMemOperand &MMO) {
814
0
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
815
0
                        MMO);
816
0
}
817
MachineInstrBuilder
818
MachineIRBuilder::buildAtomicRMWAnd(Register OldValRes, Register Addr,
819
0
                                    Register Val, MachineMemOperand &MMO) {
820
0
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
821
0
                        MMO);
822
0
}
823
MachineInstrBuilder
824
MachineIRBuilder::buildAtomicRMWNand(Register OldValRes, Register Addr,
825
0
                                     Register Val, MachineMemOperand &MMO) {
826
0
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
827
0
                        MMO);
828
0
}
829
MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(Register OldValRes,
830
                                                       Register Addr,
831
                                                       Register Val,
832
0
                                                       MachineMemOperand &MMO) {
833
0
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
834
0
                        MMO);
835
0
}
836
MachineInstrBuilder
837
MachineIRBuilder::buildAtomicRMWXor(Register OldValRes, Register Addr,
838
0
                                    Register Val, MachineMemOperand &MMO) {
839
0
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
840
0
                        MMO);
841
0
}
842
MachineInstrBuilder
843
MachineIRBuilder::buildAtomicRMWMax(Register OldValRes, Register Addr,
844
0
                                    Register Val, MachineMemOperand &MMO) {
845
0
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
846
0
                        MMO);
847
0
}
848
MachineInstrBuilder
849
MachineIRBuilder::buildAtomicRMWMin(Register OldValRes, Register Addr,
850
0
                                    Register Val, MachineMemOperand &MMO) {
851
0
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
852
0
                        MMO);
853
0
}
854
MachineInstrBuilder
855
MachineIRBuilder::buildAtomicRMWUmax(Register OldValRes, Register Addr,
856
0
                                     Register Val, MachineMemOperand &MMO) {
857
0
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
858
0
                        MMO);
859
0
}
860
MachineInstrBuilder
861
MachineIRBuilder::buildAtomicRMWUmin(Register OldValRes, Register Addr,
862
0
                                     Register Val, MachineMemOperand &MMO) {
863
0
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
864
0
                        MMO);
865
0
}
866
867
MachineInstrBuilder
868
1.62k
MachineIRBuilder::buildFence(unsigned Ordering, unsigned Scope) {
869
1.62k
  return buildInstr(TargetOpcode::G_FENCE)
870
1.62k
    .addImm(Ordering)
871
1.62k
    .addImm(Scope);
872
1.62k
}
873
874
MachineInstrBuilder
875
2
MachineIRBuilder::buildBlockAddress(Register Res, const BlockAddress *BA) {
876
#ifndef NDEBUG
877
  assert(getMRI()->getType(Res).isPointer() && "invalid res type");
878
#endif
879
880
2
  return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
881
2
}
882
883
void MachineIRBuilder::validateTruncExt(const LLT &DstTy, const LLT &SrcTy,
884
2.27M
                                        bool IsExtend) {
885
#ifndef NDEBUG
886
  if (DstTy.isVector()) {
887
    assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
888
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
889
           "different number of elements in a trunc/ext");
890
  } else
891
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");
892
893
  if (IsExtend)
894
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
895
           "invalid narrowing extend");
896
  else
897
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
898
           "invalid widening trunc");
899
#endif
900
}
901
902
void MachineIRBuilder::validateSelectOp(const LLT &ResTy, const LLT &TstTy,
903
83.3k
                                        const LLT &Op0Ty, const LLT &Op1Ty) {
904
#ifndef NDEBUG
905
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
906
         "invalid operand type");
907
  assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
908
  if (ResTy.isScalar() || ResTy.isPointer())
909
    assert(TstTy.isScalar() && "type mismatch");
910
  else
911
    assert((TstTy.isScalar() ||
912
            (TstTy.isVector() &&
913
             TstTy.getNumElements() == Op0Ty.getNumElements())) &&
914
           "type mismatch");
915
#endif
916
}
917
918
MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opc,
919
                                                 ArrayRef<DstOp> DstOps,
920
                                                 ArrayRef<SrcOp> SrcOps,
921
11.7M
                                                 Optional<unsigned> Flags) {
922
11.7M
  switch (Opc) {
923
11.7M
  default:
924
1.68M
    break;
925
11.7M
  case TargetOpcode::G_SELECT: {
926
83.3k
    assert(DstOps.size() == 1 && "Invalid select");
927
83.3k
    assert(SrcOps.size() == 3 && "Invalid select");
928
83.3k
    validateSelectOp(
929
83.3k
        DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
930
83.3k
        SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
931
83.3k
    break;
932
11.7M
  }
933
11.7M
  case TargetOpcode::G_ADD:
934
923k
  case TargetOpcode::G_AND:
935
923k
  case TargetOpcode::G_MUL:
936
923k
  case TargetOpcode::G_OR:
937
923k
  case TargetOpcode::G_SUB:
938
923k
  case TargetOpcode::G_XOR:
939
923k
  case TargetOpcode::G_UDIV:
940
923k
  case TargetOpcode::G_SDIV:
941
923k
  case TargetOpcode::G_UREM:
942
923k
  case TargetOpcode::G_SREM:
943
923k
  case TargetOpcode::G_SMIN:
944
923k
  case TargetOpcode::G_SMAX:
945
923k
  case TargetOpcode::G_UMIN:
946
923k
  case TargetOpcode::G_UMAX: {
947
923k
    // All these are binary ops.
948
923k
    assert(DstOps.size() == 1 && "Invalid Dst");
949
923k
    assert(SrcOps.size() == 2 && "Invalid Srcs");
950
923k
    validateBinaryOp(DstOps[0].getLLTTy(*getMRI()),
951
923k
                     SrcOps[0].getLLTTy(*getMRI()),
952
923k
                     SrcOps[1].getLLTTy(*getMRI()));
953
923k
    break;
954
923k
  }
955
923k
  case TargetOpcode::G_SHL:
956
148k
  case TargetOpcode::G_ASHR:
957
148k
  case TargetOpcode::G_LSHR: {
958
148k
    assert(DstOps.size() == 1 && "Invalid Dst");
959
148k
    assert(SrcOps.size() == 2 && "Invalid Srcs");
960
148k
    validateShiftOp(DstOps[0].getLLTTy(*getMRI()),
961
148k
                    SrcOps[0].getLLTTy(*getMRI()),
962
148k
                    SrcOps[1].getLLTTy(*getMRI()));
963
148k
    break;
964
148k
  }
965
897k
  case TargetOpcode::G_SEXT:
966
897k
  case TargetOpcode::G_ZEXT:
967
897k
  case TargetOpcode::G_ANYEXT:
968
897k
    assert(DstOps.size() == 1 && "Invalid Dst");
969
897k
    assert(SrcOps.size() == 1 && "Invalid Srcs");
970
897k
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
971
897k
                     SrcOps[0].getLLTTy(*getMRI()), true);
972
897k
    break;
973
1.37M
  case TargetOpcode::G_TRUNC:
974
1.37M
  case TargetOpcode::G_FPTRUNC: {
975
1.37M
    assert(DstOps.size() == 1 && "Invalid Dst");
976
1.37M
    assert(SrcOps.size() == 1 && "Invalid Srcs");
977
1.37M
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
978
1.37M
                     SrcOps[0].getLLTTy(*getMRI()), false);
979
1.37M
    break;
980
1.37M
  }
981
5.43M
  case TargetOpcode::COPY:
982
5.43M
    assert(DstOps.size() == 1 && "Invalid Dst");
983
5.43M
    // If the caller wants to add a subreg source it has to be done separately
984
5.43M
    // so we may not have any SrcOps at this point yet.
985
5.43M
    break;
986
1.37M
  case TargetOpcode::G_FCMP:
987
1.09M
  case TargetOpcode::G_ICMP: {
988
1.09M
    assert(DstOps.size() == 1 && "Invalid Dst Operands");
989
1.09M
    assert(SrcOps.size() == 3 && "Invalid Src Operands");
990
1.09M
    // For F/ICMP, the first src operand is the predicate, followed by
991
1.09M
    // the two comparands.
992
1.09M
    assert(SrcOps[0].getSrcOpKind() == SrcOp::SrcType::Ty_Predicate &&
993
1.09M
           "Expecting predicate");
994
1.09M
    assert([&]() -> bool {
995
1.09M
      CmpInst::Predicate Pred = SrcOps[0].getPredicate();
996
1.09M
      return Opc == TargetOpcode::G_ICMP ? CmpInst::isIntPredicate(Pred)
997
1.09M
                                         : CmpInst::isFPPredicate(Pred);
998
1.09M
    }() && "Invalid predicate");
999
1.09M
    assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
1000
1.09M
           "Type mismatch");
1001
1.09M
    assert([&]() -> bool {
1002
1.09M
      LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
1003
1.09M
      LLT DstTy = DstOps[0].getLLTTy(*getMRI());
1004
1.09M
      if (Op0Ty.isScalar() || Op0Ty.isPointer())
1005
1.09M
        return DstTy.isScalar();
1006
1.09M
      else
1007
1.09M
        return DstTy.isVector() &&
1008
1.09M
               DstTy.getNumElements() == Op0Ty.getNumElements();
1009
1.09M
    }() && "Type Mismatch");
1010
1.09M
    break;
1011
1.09M
  }
1012
1.09M
  case TargetOpcode::G_UNMERGE_VALUES: {
1013
4.51k
    assert(!DstOps.empty() && "Invalid trivial sequence");
1014
4.51k
    assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
1015
4.51k
    assert(std::all_of(DstOps.begin(), DstOps.end(),
1016
4.51k
                       [&, this](const DstOp &Op) {
1017
4.51k
                         return Op.getLLTTy(*getMRI()) ==
1018
4.51k
                                DstOps[0].getLLTTy(*getMRI());
1019
4.51k
                       }) &&
1020
4.51k
           "type mismatch in output list");
1021
4.51k
    assert(DstOps.size() * DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1022
4.51k
               SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1023
4.51k
           "input operands do not cover output register");
1024
4.51k
    break;
1025
1.09M
  }
1026
1.09M
  case TargetOpcode::G_MERGE_VALUES: {
1027
751
    assert(!SrcOps.empty() && "invalid trivial sequence");
1028
751
    assert(DstOps.size() == 1 && "Invalid Dst");
1029
751
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
1030
751
                       [&, this](const SrcOp &Op) {
1031
751
                         return Op.getLLTTy(*getMRI()) ==
1032
751
                                SrcOps[0].getLLTTy(*getMRI());
1033
751
                       }) &&
1034
751
           "type mismatch in input list");
1035
751
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1036
751
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1037
751
           "input operands do not cover output register");
1038
751
    if (SrcOps.size() == 1)
1039
0
      return buildCast(DstOps[0], SrcOps[0]);
1040
751
    if (DstOps[0].getLLTTy(*getMRI()).isVector())
1041
45
      return buildInstr(TargetOpcode::G_CONCAT_VECTORS, DstOps, SrcOps);
1042
706
    break;
1043
706
  }
1044
5.19k
  case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
1045
5.19k
    assert(DstOps.size() == 1 && "Invalid Dst size");
1046
5.19k
    assert(SrcOps.size() == 2 && "Invalid Src size");
1047
5.19k
    assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
1048
5.19k
    assert((DstOps[0].getLLTTy(*getMRI()).isScalar() ||
1049
5.19k
            DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
1050
5.19k
           "Invalid operand type");
1051
5.19k
    assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
1052
5.19k
    assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
1053
5.19k
               DstOps[0].getLLTTy(*getMRI()) &&
1054
5.19k
           "Type mismatch");
1055
5.19k
    break;
1056
706
  }
1057
13.1k
  case TargetOpcode::G_INSERT_VECTOR_ELT: {
1058
13.1k
    assert(DstOps.size() == 1 && "Invalid dst size");
1059
13.1k
    assert(SrcOps.size() == 3 && "Invalid src size");
1060
13.1k
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
1061
13.1k
           SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
1062
13.1k
    assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
1063
13.1k
               SrcOps[1].getLLTTy(*getMRI()) &&
1064
13.1k
           "Type mismatch");
1065
13.1k
    assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
1066
13.1k
    assert(DstOps[0].getLLTTy(*getMRI()).getNumElements() ==
1067
13.1k
               SrcOps[0].getLLTTy(*getMRI()).getNumElements() &&
1068
13.1k
           "Type mismatch");
1069
13.1k
    break;
1070
706
  }
1071
49.8k
  case TargetOpcode::G_BUILD_VECTOR: {
1072
49.8k
    assert((!SrcOps.empty() || SrcOps.size() < 2) &&
1073
49.8k
           "Must have at least 2 operands");
1074
49.8k
    assert(DstOps.size() == 1 && "Invalid DstOps");
1075
49.8k
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
1076
49.8k
           "Res type must be a vector");
1077
49.8k
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
1078
49.8k
                       [&, this](const SrcOp &Op) {
1079
49.8k
                         return Op.getLLTTy(*getMRI()) ==
1080
49.8k
                                SrcOps[0].getLLTTy(*getMRI());
1081
49.8k
                       }) &&
1082
49.8k
           "type mismatch in input list");
1083
49.8k
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1084
49.8k
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1085
49.8k
           "input scalars do not exactly cover the output vector register");
1086
49.8k
    break;
1087
706
  }
1088
706
  case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
1089
0
    assert((!SrcOps.empty() || SrcOps.size() < 2) &&
1090
0
           "Must have at least 2 operands");
1091
0
    assert(DstOps.size() == 1 && "Invalid DstOps");
1092
0
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
1093
0
           "Res type must be a vector");
1094
0
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
1095
0
                       [&, this](const SrcOp &Op) {
1096
0
                         return Op.getLLTTy(*getMRI()) ==
1097
0
                                SrcOps[0].getLLTTy(*getMRI());
1098
0
                       }) &&
1099
0
           "type mismatch in input list");
1100
0
    if (SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1101
0
        DstOps[0].getLLTTy(*getMRI()).getElementType().getSizeInBits())
1102
0
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
1103
0
    break;
1104
0
  }
1105
354
  case TargetOpcode::G_CONCAT_VECTORS: {
1106
354
    assert(DstOps.size() == 1 && "Invalid DstOps");
1107
354
    assert((!SrcOps.empty() || SrcOps.size() < 2) &&
1108
354
           "Must have at least 2 operands");
1109
354
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
1110
354
                       [&, this](const SrcOp &Op) {
1111
354
                         return (Op.getLLTTy(*getMRI()).isVector() &&
1112
354
                                 Op.getLLTTy(*getMRI()) ==
1113
354
                                     SrcOps[0].getLLTTy(*getMRI()));
1114
354
                       }) &&
1115
354
           "type mismatch in input list");
1116
354
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1117
354
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1118
354
           "input vectors do not exactly cover the output vector register");
1119
354
    break;
1120
0
  }
1121
57
  case TargetOpcode::G_UADDE: {
1122
57
    assert(DstOps.size() == 2 && "Invalid no of dst operands");
1123
57
    assert(SrcOps.size() == 3 && "Invalid no of src operands");
1124
57
    assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
1125
57
    assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
1126
57
           (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
1127
57
           "Invalid operand");
1128
57
    assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
1129
57
    assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
1130
57
           "type mismatch");
1131
57
    break;
1132
11.7M
  }
1133
11.7M
  }
1134
11.7M
1135
11.7M
  auto MIB = buildInstr(Opc);
1136
11.7M
  for (const DstOp &Op : DstOps)
1137
11.4M
    Op.addDefToMIB(*getMRI(), MIB);
1138
11.7M
  for (const SrcOp &Op : SrcOps)
1139
14.7M
    Op.addSrcToMIB(MIB);
1140
11.7M
  if (Flags)
1141
1.09M
    MIB->setFlags(*Flags);
1142
11.7M
  return MIB;
1143
11.7M
}