/Users/buildslave/jenkins/sharedspace/clang-stage2-coverage-R@2/llvm/lib/CodeGen/GlobalISel/MachineIRBuilder.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | //===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==// |
2 | | // |
3 | | // The LLVM Compiler Infrastructure |
4 | | // |
5 | | // This file is distributed under the University of Illinois Open Source |
6 | | // License. See LICENSE.TXT for details. |
7 | | // |
8 | | //===----------------------------------------------------------------------===// |
9 | | /// \file |
10 | | /// This file implements the MachineIRBuilder class. |
11 | | //===----------------------------------------------------------------------===// |
12 | | #include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h" |
13 | | |
14 | | #include "llvm/CodeGen/MachineFunction.h" |
15 | | #include "llvm/CodeGen/MachineInstr.h" |
16 | | #include "llvm/CodeGen/MachineInstrBuilder.h" |
17 | | #include "llvm/CodeGen/MachineRegisterInfo.h" |
18 | | #include "llvm/IR/DebugInfo.h" |
19 | | #include "llvm/Target/TargetInstrInfo.h" |
20 | | #include "llvm/Target/TargetOpcodes.h" |
21 | | #include "llvm/Target/TargetSubtargetInfo.h" |
22 | | |
23 | | using namespace llvm; |
24 | | |
25 | 1.02M | void MachineIRBuilder::setMF(MachineFunction &MF) { |
26 | 1.02M | this->MF = &MF; |
27 | 1.02M | this->MBB = nullptr; |
28 | 1.02M | this->MRI = &MF.getRegInfo(); |
29 | 1.02M | this->TII = MF.getSubtarget().getInstrInfo(); |
30 | 1.02M | this->DL = DebugLoc(); |
31 | 1.02M | this->II = MachineBasicBlock::iterator(); |
32 | 1.02M | this->InsertedInstr = nullptr; |
33 | 1.02M | } |
34 | | |
35 | 3.77M | void MachineIRBuilder::setMBB(MachineBasicBlock &MBB) { |
36 | 3.77M | this->MBB = &MBB; |
37 | 3.77M | this->II = MBB.end(); |
38 | 3.77M | assert(&getMF() == MBB.getParent() && |
39 | 3.77M | "Basic block is in a different function"); |
40 | 3.77M | } |
41 | | |
42 | 894k | void MachineIRBuilder::setInstr(MachineInstr &MI) { |
43 | 894k | assert(MI.getParent() && "Instruction is not part of a basic block"); |
44 | 894k | setMBB(*MI.getParent()); |
45 | 894k | this->II = MI.getIterator(); |
46 | 894k | } |
47 | | |
48 | | void MachineIRBuilder::setInsertPt(MachineBasicBlock &MBB, |
49 | 29.2k | MachineBasicBlock::iterator II) { |
50 | 29.2k | assert(MBB.getParent() == &getMF() && |
51 | 29.2k | "Basic block is in a different function"); |
52 | 29.2k | this->MBB = &MBB; |
53 | 29.2k | this->II = II; |
54 | 29.2k | } |
55 | | |
56 | | void MachineIRBuilder::recordInsertions( |
57 | 5.93M | std::function<void(MachineInstr *)> Inserted) { |
58 | 5.93M | InsertedInstr = std::move(Inserted); |
59 | 5.93M | } |
60 | | |
61 | 5.93M | void MachineIRBuilder::stopRecordingInsertions() { |
62 | 5.93M | InsertedInstr = nullptr; |
63 | 5.93M | } |
64 | | |
65 | | //------------------------------------------------------------------------------ |
66 | | // Build instruction variants. |
67 | | //------------------------------------------------------------------------------ |
68 | | |
69 | 19.7M | MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) { |
70 | 19.7M | return insertInstr(buildInstrNoInsert(Opcode)); |
71 | 19.7M | } |
72 | | |
73 | 21.2M | MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) { |
74 | 21.2M | MachineInstrBuilder MIB = BuildMI(getMF(), DL, getTII().get(Opcode)); |
75 | 21.2M | return MIB; |
76 | 21.2M | } |
77 | | |
78 | | |
79 | 21.2M | MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) { |
80 | 21.2M | getMBB().insert(getInsertPt(), MIB); |
81 | 21.2M | if (InsertedInstr) |
82 | 1.89M | InsertedInstr(MIB); |
83 | 21.2M | return MIB; |
84 | 21.2M | } |
85 | | |
86 | | MachineInstrBuilder |
87 | | MachineIRBuilder::buildDirectDbgValue(unsigned Reg, const MDNode *Variable, |
88 | 4 | const MDNode *Expr) { |
89 | 4 | assert(isa<DILocalVariable>(Variable) && "not a variable"); |
90 | 4 | assert(cast<DIExpression>(Expr)->isValid() && "not an expression"); |
91 | 4 | assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) && |
92 | 4 | "Expected inlined-at fields to agree"); |
93 | 4 | return insertInstr(BuildMI(getMF(), DL, getTII().get(TargetOpcode::DBG_VALUE), |
94 | 4 | /*IsIndirect*/ false, Reg, Variable, Expr)); |
95 | 4 | } |
96 | | |
97 | | MachineInstrBuilder |
98 | | MachineIRBuilder::buildIndirectDbgValue(unsigned Reg, const MDNode *Variable, |
99 | 0 | const MDNode *Expr) { |
100 | 0 | assert(isa<DILocalVariable>(Variable) && "not a variable"); |
101 | 0 | assert(cast<DIExpression>(Expr)->isValid() && "not an expression"); |
102 | 0 | assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) && |
103 | 0 | "Expected inlined-at fields to agree"); |
104 | 0 | return insertInstr(BuildMI(getMF(), DL, getTII().get(TargetOpcode::DBG_VALUE), |
105 | 0 | /*IsIndirect*/ true, Reg, Variable, Expr)); |
106 | 0 | } |
107 | | |
108 | | MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI, |
109 | | const MDNode *Variable, |
110 | 0 | const MDNode *Expr) { |
111 | 0 | assert(isa<DILocalVariable>(Variable) && "not a variable"); |
112 | 0 | assert(cast<DIExpression>(Expr)->isValid() && "not an expression"); |
113 | 0 | assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) && |
114 | 0 | "Expected inlined-at fields to agree"); |
115 | 0 | return buildInstr(TargetOpcode::DBG_VALUE) |
116 | 0 | .addFrameIndex(FI) |
117 | 0 | .addImm(0) |
118 | 0 | .addMetadata(Variable) |
119 | 0 | .addMetadata(Expr); |
120 | 0 | } |
121 | | |
122 | | MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C, |
123 | | const MDNode *Variable, |
124 | 3 | const MDNode *Expr) { |
125 | 3 | assert(isa<DILocalVariable>(Variable) && "not a variable"); |
126 | 3 | assert(cast<DIExpression>(Expr)->isValid() && "not an expression"); |
127 | 3 | assert(cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(DL) && |
128 | 3 | "Expected inlined-at fields to agree"); |
129 | 3 | auto MIB = buildInstr(TargetOpcode::DBG_VALUE); |
130 | 3 | if (auto *CI3 = dyn_cast<ConstantInt>(&C)) { |
131 | 1 | if (CI->getBitWidth() > 64) |
132 | 0 | MIB.addCImm(CI); |
133 | 1 | else |
134 | 1 | MIB.addImm(CI->getZExtValue()); |
135 | 3 | } else if (auto *2 CFP2 = dyn_cast<ConstantFP>(&C)) { |
136 | 1 | MIB.addFPImm(CFP); |
137 | 2 | } else { |
138 | 1 | // Insert %noreg if we didn't find a usable constant and had to drop it. |
139 | 1 | MIB.addReg(0U); |
140 | 1 | } |
141 | 3 | |
142 | 3 | return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr); |
143 | 3 | } |
144 | | |
145 | 53.5k | MachineInstrBuilder MachineIRBuilder::buildFrameIndex(unsigned Res, int Idx) { |
146 | 53.5k | assert(MRI->getType(Res).isPointer() && "invalid operand type"); |
147 | 53.5k | return buildInstr(TargetOpcode::G_FRAME_INDEX) |
148 | 53.5k | .addDef(Res) |
149 | 53.5k | .addFrameIndex(Idx); |
150 | 53.5k | } |
151 | | |
152 | | MachineInstrBuilder MachineIRBuilder::buildGlobalValue(unsigned Res, |
153 | 477k | const GlobalValue *GV) { |
154 | 477k | assert(MRI->getType(Res).isPointer() && "invalid operand type"); |
155 | 477k | assert(MRI->getType(Res).getAddressSpace() == |
156 | 477k | GV->getType()->getAddressSpace() && |
157 | 477k | "address space mismatch"); |
158 | 477k | |
159 | 477k | return buildInstr(TargetOpcode::G_GLOBAL_VALUE) |
160 | 477k | .addDef(Res) |
161 | 477k | .addGlobalAddress(GV); |
162 | 477k | } |
163 | | |
164 | | MachineInstrBuilder MachineIRBuilder::buildBinaryOp(unsigned Opcode, unsigned Res, unsigned Op0, |
165 | 255k | unsigned Op1) { |
166 | 255k | assert((MRI->getType(Res).isScalar() || MRI->getType(Res).isVector()) && |
167 | 255k | "invalid operand type"); |
168 | 255k | assert(MRI->getType(Res) == MRI->getType(Op0) && |
169 | 255k | MRI->getType(Res) == MRI->getType(Op1) && "type mismatch"); |
170 | 255k | |
171 | 255k | return buildInstr(Opcode) |
172 | 255k | .addDef(Res) |
173 | 255k | .addUse(Op0) |
174 | 255k | .addUse(Op1); |
175 | 255k | } |
176 | | |
177 | | MachineInstrBuilder MachineIRBuilder::buildAdd(unsigned Res, unsigned Op0, |
178 | 248 | unsigned Op1) { |
179 | 248 | return buildBinaryOp(TargetOpcode::G_ADD, Res, Op0, Op1); |
180 | 248 | } |
181 | | |
182 | | MachineInstrBuilder MachineIRBuilder::buildGEP(unsigned Res, unsigned Op0, |
183 | 1.56M | unsigned Op1) { |
184 | 1.56M | assert(MRI->getType(Res).isPointer() && |
185 | 1.56M | MRI->getType(Res) == MRI->getType(Op0) && "type mismatch"); |
186 | 1.56M | assert(MRI->getType(Op1).isScalar() && "invalid offset type"); |
187 | 1.56M | |
188 | 1.56M | return buildInstr(TargetOpcode::G_GEP) |
189 | 1.56M | .addDef(Res) |
190 | 1.56M | .addUse(Op0) |
191 | 1.56M | .addUse(Op1); |
192 | 1.56M | } |
193 | | |
194 | | Optional<MachineInstrBuilder> |
195 | | MachineIRBuilder::materializeGEP(unsigned &Res, unsigned Op0, |
196 | 28 | const LLT &ValueTy, uint64_t Value) { |
197 | 28 | assert(Res == 0 && "Res is a result argument"); |
198 | 28 | assert(ValueTy.isScalar() && "invalid offset type"); |
199 | 28 | |
200 | 28 | if (Value == 028 ) { |
201 | 14 | Res = Op0; |
202 | 14 | return None; |
203 | 14 | } |
204 | 14 | |
205 | 14 | Res = MRI->createGenericVirtualRegister(MRI->getType(Op0)); |
206 | 14 | unsigned TmpReg = MRI->createGenericVirtualRegister(ValueTy); |
207 | 14 | |
208 | 14 | buildConstant(TmpReg, Value); |
209 | 14 | return buildGEP(Res, Op0, TmpReg); |
210 | 14 | } |
211 | | |
212 | | MachineInstrBuilder MachineIRBuilder::buildPtrMask(unsigned Res, unsigned Op0, |
213 | 75 | uint32_t NumBits) { |
214 | 75 | assert(MRI->getType(Res).isPointer() && |
215 | 75 | MRI->getType(Res) == MRI->getType(Op0) && "type mismatch"); |
216 | 75 | |
217 | 75 | return buildInstr(TargetOpcode::G_PTR_MASK) |
218 | 75 | .addDef(Res) |
219 | 75 | .addUse(Op0) |
220 | 75 | .addImm(NumBits); |
221 | 75 | } |
222 | | |
223 | | MachineInstrBuilder MachineIRBuilder::buildSub(unsigned Res, unsigned Op0, |
224 | 837 | unsigned Op1) { |
225 | 837 | return buildBinaryOp(TargetOpcode::G_SUB, Res, Op0, Op1); |
226 | 837 | } |
227 | | |
228 | | MachineInstrBuilder MachineIRBuilder::buildMul(unsigned Res, unsigned Op0, |
229 | 217k | unsigned Op1) { |
230 | 217k | return buildBinaryOp(TargetOpcode::G_MUL, Res, Op0, Op1); |
231 | 217k | } |
232 | | |
233 | | MachineInstrBuilder MachineIRBuilder::buildAnd(unsigned Res, unsigned Op0, |
234 | 37.3k | unsigned Op1) { |
235 | 37.3k | return buildBinaryOp(TargetOpcode::G_AND, Res, Op0, Op1); |
236 | 37.3k | } |
237 | | |
238 | | MachineInstrBuilder MachineIRBuilder::buildOr(unsigned Res, unsigned Op0, |
239 | 10 | unsigned Op1) { |
240 | 10 | return buildBinaryOp(TargetOpcode::G_OR, Res, Op0, Op1); |
241 | 10 | } |
242 | | |
243 | 731k | MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) { |
244 | 731k | return buildInstr(TargetOpcode::G_BR).addMBB(&Dest); |
245 | 731k | } |
246 | | |
247 | 3 | MachineInstrBuilder MachineIRBuilder::buildBrIndirect(unsigned Tgt) { |
248 | 3 | assert(MRI->getType(Tgt).isPointer() && "invalid branch destination"); |
249 | 3 | return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt); |
250 | 3 | } |
251 | | |
252 | 4.82M | MachineInstrBuilder MachineIRBuilder::buildCopy(unsigned Res, unsigned Op) { |
253 | 4.82M | assert(MRI->getType(Res) == LLT() || MRI->getType(Op) == LLT() || |
254 | 4.82M | MRI->getType(Res) == MRI->getType(Op)); |
255 | 4.82M | return buildInstr(TargetOpcode::COPY).addDef(Res).addUse(Op); |
256 | 4.82M | } |
257 | | |
258 | | MachineInstrBuilder MachineIRBuilder::buildConstant(unsigned Res, |
259 | 1.88M | const ConstantInt &Val) { |
260 | 1.88M | LLT Ty = MRI->getType(Res); |
261 | 1.88M | |
262 | 1.88M | assert((Ty.isScalar() || Ty.isPointer()) && "invalid operand type"); |
263 | 1.88M | |
264 | 1.88M | const ConstantInt *NewVal = &Val; |
265 | 1.88M | if (Ty.getSizeInBits() != Val.getBitWidth()) |
266 | 181k | NewVal = ConstantInt::get(MF->getFunction()->getContext(), |
267 | 181k | Val.getValue().sextOrTrunc(Ty.getSizeInBits())); |
268 | 1.88M | |
269 | 1.88M | return buildInstr(TargetOpcode::G_CONSTANT).addDef(Res).addCImm(NewVal); |
270 | 1.88M | } |
271 | | |
272 | | MachineInstrBuilder MachineIRBuilder::buildConstant(unsigned Res, |
273 | 200k | int64_t Val) { |
274 | 200k | auto IntN = IntegerType::get(MF->getFunction()->getContext(), |
275 | 200k | MRI->getType(Res).getSizeInBits()); |
276 | 200k | ConstantInt *CI = ConstantInt::get(IntN, Val, true); |
277 | 200k | return buildConstant(Res, *CI); |
278 | 200k | } |
279 | | |
280 | | MachineInstrBuilder MachineIRBuilder::buildFConstant(unsigned Res, |
281 | 26.0k | const ConstantFP &Val) { |
282 | 26.0k | assert(MRI->getType(Res).isScalar() && "invalid operand type"); |
283 | 26.0k | |
284 | 26.0k | return buildInstr(TargetOpcode::G_FCONSTANT).addDef(Res).addFPImm(&Val); |
285 | 26.0k | } |
286 | | |
287 | | MachineInstrBuilder MachineIRBuilder::buildBrCond(unsigned Tst, |
288 | 899k | MachineBasicBlock &Dest) { |
289 | 899k | assert(MRI->getType(Tst).isScalar() && "invalid operand type"); |
290 | 899k | |
291 | 899k | return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest); |
292 | 899k | } |
293 | | |
294 | | MachineInstrBuilder MachineIRBuilder::buildLoad(unsigned Res, unsigned Addr, |
295 | 1.05M | MachineMemOperand &MMO) { |
296 | 1.05M | assert(MRI->getType(Res).isValid() && "invalid operand type"); |
297 | 1.05M | assert(MRI->getType(Addr).isPointer() && "invalid operand type"); |
298 | 1.05M | |
299 | 1.05M | return buildInstr(TargetOpcode::G_LOAD) |
300 | 1.05M | .addDef(Res) |
301 | 1.05M | .addUse(Addr) |
302 | 1.05M | .addMemOperand(&MMO); |
303 | 1.05M | } |
304 | | |
305 | | MachineInstrBuilder MachineIRBuilder::buildStore(unsigned Val, unsigned Addr, |
306 | 823k | MachineMemOperand &MMO) { |
307 | 823k | assert(MRI->getType(Val).isValid() && "invalid operand type"); |
308 | 823k | assert(MRI->getType(Addr).isPointer() && "invalid operand type"); |
309 | 823k | |
310 | 823k | return buildInstr(TargetOpcode::G_STORE) |
311 | 823k | .addUse(Val) |
312 | 823k | .addUse(Addr) |
313 | 823k | .addMemOperand(&MMO); |
314 | 823k | } |
315 | | |
316 | | MachineInstrBuilder MachineIRBuilder::buildUAdde(unsigned Res, |
317 | | unsigned CarryOut, |
318 | | unsigned Op0, unsigned Op1, |
319 | 6 | unsigned CarryIn) { |
320 | 6 | assert(MRI->getType(Res).isScalar() && "invalid operand type"); |
321 | 6 | assert(MRI->getType(Res) == MRI->getType(Op0) && |
322 | 6 | MRI->getType(Res) == MRI->getType(Op1) && "type mismatch"); |
323 | 6 | assert(MRI->getType(CarryOut).isScalar() && "invalid operand type"); |
324 | 6 | assert(MRI->getType(CarryOut) == MRI->getType(CarryIn) && "type mismatch"); |
325 | 6 | |
326 | 6 | return buildInstr(TargetOpcode::G_UADDE) |
327 | 6 | .addDef(Res) |
328 | 6 | .addDef(CarryOut) |
329 | 6 | .addUse(Op0) |
330 | 6 | .addUse(Op1) |
331 | 6 | .addUse(CarryIn); |
332 | 6 | } |
333 | | |
334 | 92.8k | MachineInstrBuilder MachineIRBuilder::buildAnyExt(unsigned Res, unsigned Op) { |
335 | 92.8k | validateTruncExt(Res, Op, true); |
336 | 92.8k | return buildInstr(TargetOpcode::G_ANYEXT).addDef(Res).addUse(Op); |
337 | 92.8k | } |
338 | | |
339 | 1.50k | MachineInstrBuilder MachineIRBuilder::buildSExt(unsigned Res, unsigned Op) { |
340 | 1.50k | validateTruncExt(Res, Op, true); |
341 | 1.50k | return buildInstr(TargetOpcode::G_SEXT).addDef(Res).addUse(Op); |
342 | 1.50k | } |
343 | | |
344 | 80.3k | MachineInstrBuilder MachineIRBuilder::buildZExt(unsigned Res, unsigned Op) { |
345 | 80.3k | validateTruncExt(Res, Op, true); |
346 | 80.3k | return buildInstr(TargetOpcode::G_ZEXT).addDef(Res).addUse(Op); |
347 | 80.3k | } |
348 | | |
349 | | MachineInstrBuilder |
350 | 139k | MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc, unsigned Res, unsigned Op) { |
351 | 139k | assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc || |
352 | 139k | TargetOpcode::G_SEXT == ExtOpc) && |
353 | 139k | "Expecting Extending Opc"); |
354 | 139k | assert(MRI->getType(Res).isScalar() || MRI->getType(Res).isVector()); |
355 | 139k | assert(MRI->getType(Res).isScalar() == MRI->getType(Op).isScalar()); |
356 | 139k | |
357 | 139k | unsigned Opcode = TargetOpcode::COPY; |
358 | 139k | if (MRI->getType(Res).getSizeInBits() > MRI->getType(Op).getSizeInBits()) |
359 | 1.58k | Opcode = ExtOpc; |
360 | 137k | else if (137k MRI->getType(Res).getSizeInBits() < MRI->getType(Op).getSizeInBits()137k ) |
361 | 21.9k | Opcode = TargetOpcode::G_TRUNC; |
362 | 137k | else |
363 | 137k | assert(MRI->getType(Res) == MRI->getType(Op)); |
364 | 139k | |
365 | 139k | return buildInstr(Opcode).addDef(Res).addUse(Op); |
366 | 139k | } |
367 | | |
368 | | MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(unsigned Res, |
369 | 4 | unsigned Op) { |
370 | 4 | return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op); |
371 | 4 | } |
372 | | |
373 | | MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(unsigned Res, |
374 | 8 | unsigned Op) { |
375 | 8 | return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op); |
376 | 8 | } |
377 | | |
378 | | MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(unsigned Res, |
379 | 139k | unsigned Op) { |
380 | 139k | return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op); |
381 | 139k | } |
382 | | |
383 | 5.91k | MachineInstrBuilder MachineIRBuilder::buildCast(unsigned Dst, unsigned Src) { |
384 | 5.91k | LLT SrcTy = MRI->getType(Src); |
385 | 5.91k | LLT DstTy = MRI->getType(Dst); |
386 | 5.91k | if (SrcTy == DstTy) |
387 | 1.39k | return buildCopy(Dst, Src); |
388 | 4.51k | |
389 | 4.51k | unsigned Opcode; |
390 | 4.51k | if (SrcTy.isPointer() && 4.51k DstTy.isScalar()2.61k ) |
391 | 2.61k | Opcode = TargetOpcode::G_PTRTOINT; |
392 | 1.90k | else if (1.90k DstTy.isPointer() && 1.90k SrcTy.isScalar()1.90k ) |
393 | 1.90k | Opcode = TargetOpcode::G_INTTOPTR; |
394 | 0 | else { |
395 | 0 | assert(!SrcTy.isPointer() && !DstTy.isPointer() && "n G_ADDRCAST yet"); |
396 | 0 | Opcode = TargetOpcode::G_BITCAST; |
397 | 0 | } |
398 | 5.91k | |
399 | 5.91k | return buildInstr(Opcode).addDef(Dst).addUse(Src); |
400 | 5.91k | } |
401 | | |
402 | | MachineInstrBuilder MachineIRBuilder::buildExtract(unsigned Res, unsigned Src, |
403 | 11.2k | uint64_t Index) { |
404 | | #ifndef NDEBUG |
405 | | assert(MRI->getType(Src).isValid() && "invalid operand type"); |
406 | | assert(MRI->getType(Res).isValid() && "invalid operand type"); |
407 | | assert(Index + MRI->getType(Res).getSizeInBits() <= |
408 | | MRI->getType(Src).getSizeInBits() && |
409 | | "extracting off end of register"); |
410 | | #endif |
411 | | |
412 | 11.2k | if (MRI->getType(Res).getSizeInBits() == MRI->getType(Src).getSizeInBits()11.2k ) { |
413 | 7 | assert(Index == 0 && "insertion past the end of a register"); |
414 | 7 | return buildCast(Res, Src); |
415 | 7 | } |
416 | 11.2k | |
417 | 11.2k | return buildInstr(TargetOpcode::G_EXTRACT) |
418 | 11.2k | .addDef(Res) |
419 | 11.2k | .addUse(Src) |
420 | 11.2k | .addImm(Index); |
421 | 11.2k | } |
422 | | |
423 | | void MachineIRBuilder::buildSequence(unsigned Res, ArrayRef<unsigned> Ops, |
424 | 722 | ArrayRef<uint64_t> Indices) { |
425 | | #ifndef NDEBUG |
426 | | assert(Ops.size() == Indices.size() && "incompatible args"); |
427 | | assert(!Ops.empty() && "invalid trivial sequence"); |
428 | | assert(std::is_sorted(Indices.begin(), Indices.end()) && |
429 | | "sequence offsets must be in ascending order"); |
430 | | |
431 | | assert(MRI->getType(Res).isValid() && "invalid operand type"); |
432 | | for (auto Op : Ops) |
433 | | assert(MRI->getType(Op).isValid() && "invalid operand type"); |
434 | | #endif |
435 | | |
436 | 722 | LLT ResTy = MRI->getType(Res); |
437 | 722 | LLT OpTy = MRI->getType(Ops[0]); |
438 | 722 | unsigned OpSize = OpTy.getSizeInBits(); |
439 | 722 | bool MaybeMerge = true; |
440 | 1.70k | for (unsigned i = 0; i < Ops.size()1.70k ; ++i982 ) { |
441 | 1.53k | if (MRI->getType(Ops[i]) != OpTy || 1.53k Indices[i] != i * OpSize982 ) { |
442 | 552 | MaybeMerge = false; |
443 | 552 | break; |
444 | 552 | } |
445 | 1.53k | } |
446 | 722 | |
447 | 722 | if (MaybeMerge && 722 Ops.size() * OpSize == ResTy.getSizeInBits()170 ) { |
448 | 170 | buildMerge(Res, Ops); |
449 | 170 | return; |
450 | 170 | } |
451 | 552 | |
452 | 552 | unsigned ResIn = MRI->createGenericVirtualRegister(ResTy); |
453 | 552 | buildUndef(ResIn); |
454 | 552 | |
455 | 1.65k | for (unsigned i = 0; i < Ops.size()1.65k ; ++i1.10k ) { |
456 | 1.10k | unsigned ResOut = |
457 | 1.10k | i + 1 == Ops.size() ? Res552 : MRI->createGenericVirtualRegister(ResTy)554 ; |
458 | 1.10k | buildInsert(ResOut, ResIn, Ops[i], Indices[i]); |
459 | 1.10k | ResIn = ResOut; |
460 | 1.10k | } |
461 | 722 | } |
462 | | |
463 | 17.4k | MachineInstrBuilder MachineIRBuilder::buildUndef(unsigned Res) { |
464 | 17.4k | return buildInstr(TargetOpcode::G_IMPLICIT_DEF).addDef(Res); |
465 | 17.4k | } |
466 | | |
467 | | MachineInstrBuilder MachineIRBuilder::buildMerge(unsigned Res, |
468 | 52.0k | ArrayRef<unsigned> Ops) { |
469 | 52.0k | |
470 | | #ifndef NDEBUG |
471 | | assert(!Ops.empty() && "invalid trivial sequence"); |
472 | | LLT Ty = MRI->getType(Ops[0]); |
473 | | for (auto Reg : Ops) |
474 | | assert(MRI->getType(Reg) == Ty && "type mismatch in input list"); |
475 | | assert(Ops.size() * MRI->getType(Ops[0]).getSizeInBits() == |
476 | | MRI->getType(Res).getSizeInBits() && |
477 | | "input operands do not cover output register"); |
478 | | #endif |
479 | | |
480 | 52.0k | if (Ops.size() == 1) |
481 | 3.28k | return buildCast(Res, Ops[0]); |
482 | 48.7k | |
483 | 48.7k | MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_MERGE_VALUES); |
484 | 48.7k | MIB.addDef(Res); |
485 | 198k | for (unsigned i = 0; i < Ops.size()198k ; ++i150k ) |
486 | 150k | MIB.addUse(Ops[i]); |
487 | 52.0k | return MIB; |
488 | 52.0k | } |
489 | | |
490 | | MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<unsigned> Res, |
491 | 9.22k | unsigned Op) { |
492 | 9.22k | |
493 | | #ifndef NDEBUG |
494 | | assert(!Res.empty() && "invalid trivial sequence"); |
495 | | LLT Ty = MRI->getType(Res[0]); |
496 | | for (auto Reg : Res) |
497 | | assert(MRI->getType(Reg) == Ty && "type mismatch in input list"); |
498 | | assert(Res.size() * MRI->getType(Res[0]).getSizeInBits() == |
499 | | MRI->getType(Op).getSizeInBits() && |
500 | | "input operands do not cover output register"); |
501 | | #endif |
502 | | |
503 | 9.22k | MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_UNMERGE_VALUES); |
504 | 28.2k | for (unsigned i = 0; i < Res.size()28.2k ; ++i19.0k ) |
505 | 19.0k | MIB.addDef(Res[i]); |
506 | 9.22k | MIB.addUse(Op); |
507 | 9.22k | return MIB; |
508 | 9.22k | } |
509 | | |
510 | | MachineInstrBuilder MachineIRBuilder::buildInsert(unsigned Res, unsigned Src, |
511 | 18.3k | unsigned Op, unsigned Index) { |
512 | 18.3k | assert(Index + MRI->getType(Op).getSizeInBits() <= |
513 | 18.3k | MRI->getType(Res).getSizeInBits() && |
514 | 18.3k | "insertion past the end of a register"); |
515 | 18.3k | |
516 | 18.3k | if (MRI->getType(Res).getSizeInBits() == MRI->getType(Op).getSizeInBits()18.3k ) { |
517 | 2.62k | return buildCast(Res, Op); |
518 | 2.62k | } |
519 | 15.7k | |
520 | 15.7k | return buildInstr(TargetOpcode::G_INSERT) |
521 | 15.7k | .addDef(Res) |
522 | 15.7k | .addUse(Src) |
523 | 15.7k | .addUse(Op) |
524 | 15.7k | .addImm(Index); |
525 | 15.7k | } |
526 | | |
527 | | MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID, |
528 | | unsigned Res, |
529 | 143k | bool HasSideEffects) { |
530 | 143k | auto MIB = |
531 | 137k | buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS |
532 | 5.83k | : TargetOpcode::G_INTRINSIC); |
533 | 143k | if (Res) |
534 | 50.9k | MIB.addDef(Res); |
535 | 143k | MIB.addIntrinsicID(ID); |
536 | 143k | return MIB; |
537 | 143k | } |
538 | | |
539 | 236k | MachineInstrBuilder MachineIRBuilder::buildTrunc(unsigned Res, unsigned Op) { |
540 | 236k | validateTruncExt(Res, Op, false); |
541 | 236k | return buildInstr(TargetOpcode::G_TRUNC).addDef(Res).addUse(Op); |
542 | 236k | } |
543 | | |
544 | 1 | MachineInstrBuilder MachineIRBuilder::buildFPTrunc(unsigned Res, unsigned Op) { |
545 | 1 | validateTruncExt(Res, Op, false); |
546 | 1 | return buildInstr(TargetOpcode::G_FPTRUNC).addDef(Res).addUse(Op); |
547 | 1 | } |
548 | | |
549 | | MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred, |
550 | | unsigned Res, unsigned Op0, |
551 | 1.50M | unsigned Op1) { |
552 | | #ifndef NDEBUG |
553 | | assert(MRI->getType(Op0) == MRI->getType(Op0) && "type mismatch"); |
554 | | assert(CmpInst::isIntPredicate(Pred) && "invalid predicate"); |
555 | | if (MRI->getType(Op0).isScalar() || MRI->getType(Op0).isPointer()) |
556 | | assert(MRI->getType(Res).isScalar() && "type mismatch"); |
557 | | else |
558 | | assert(MRI->getType(Res).isVector() && |
559 | | MRI->getType(Res).getNumElements() == |
560 | | MRI->getType(Op0).getNumElements() && |
561 | | "type mismatch"); |
562 | | #endif |
563 | | |
564 | 1.50M | return buildInstr(TargetOpcode::G_ICMP) |
565 | 1.50M | .addDef(Res) |
566 | 1.50M | .addPredicate(Pred) |
567 | 1.50M | .addUse(Op0) |
568 | 1.50M | .addUse(Op1); |
569 | 1.50M | } |
570 | | |
571 | | MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred, |
572 | | unsigned Res, unsigned Op0, |
573 | 15.2k | unsigned Op1) { |
574 | | #ifndef NDEBUG |
575 | | assert((MRI->getType(Op0).isScalar() || MRI->getType(Op0).isVector()) && |
576 | | "invalid operand type"); |
577 | | assert(MRI->getType(Op0) == MRI->getType(Op1) && "type mismatch"); |
578 | | assert(CmpInst::isFPPredicate(Pred) && "invalid predicate"); |
579 | | if (MRI->getType(Op0).isScalar()) |
580 | | assert(MRI->getType(Res).isScalar() && "type mismatch"); |
581 | | else |
582 | | assert(MRI->getType(Res).isVector() && |
583 | | MRI->getType(Res).getNumElements() == |
584 | | MRI->getType(Op0).getNumElements() && |
585 | | "type mismatch"); |
586 | | #endif |
587 | | |
588 | 15.2k | return buildInstr(TargetOpcode::G_FCMP) |
589 | 15.2k | .addDef(Res) |
590 | 15.2k | .addPredicate(Pred) |
591 | 15.2k | .addUse(Op0) |
592 | 15.2k | .addUse(Op1); |
593 | 15.2k | } |
594 | | |
595 | | MachineInstrBuilder MachineIRBuilder::buildSelect(unsigned Res, unsigned Tst, |
596 | 71.2k | unsigned Op0, unsigned Op1) { |
597 | | #ifndef NDEBUG |
598 | | LLT ResTy = MRI->getType(Res); |
599 | | assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) && |
600 | | "invalid operand type"); |
601 | | assert(ResTy == MRI->getType(Op0) && ResTy == MRI->getType(Op1) && |
602 | | "type mismatch"); |
603 | | if (ResTy.isScalar() || ResTy.isPointer()) |
604 | | assert(MRI->getType(Tst).isScalar() && "type mismatch"); |
605 | | else |
606 | | assert((MRI->getType(Tst).isScalar() || |
607 | | (MRI->getType(Tst).isVector() && |
608 | | MRI->getType(Tst).getNumElements() == |
609 | | MRI->getType(Op0).getNumElements())) && |
610 | | "type mismatch"); |
611 | | #endif |
612 | | |
613 | 71.2k | return buildInstr(TargetOpcode::G_SELECT) |
614 | 71.2k | .addDef(Res) |
615 | 71.2k | .addUse(Tst) |
616 | 71.2k | .addUse(Op0) |
617 | 71.2k | .addUse(Op1); |
618 | 71.2k | } |
619 | | |
620 | | MachineInstrBuilder MachineIRBuilder::buildInsertVectorElement(unsigned Res, |
621 | | unsigned Val, |
622 | | unsigned Elt, |
623 | 3.42k | unsigned Idx) { |
624 | | #ifndef NDEBUG |
625 | | LLT ResTy = MRI->getType(Res); |
626 | | LLT ValTy = MRI->getType(Val); |
627 | | LLT EltTy = MRI->getType(Elt); |
628 | | LLT IdxTy = MRI->getType(Idx); |
629 | | assert(ResTy.isVector() && ValTy.isVector() && "invalid operand type"); |
630 | | assert(IdxTy.isScalar() && "invalid operand type"); |
631 | | assert(ResTy.getNumElements() == ValTy.getNumElements() && "type mismatch"); |
632 | | assert(ResTy.getElementType() == EltTy && "type mismatch"); |
633 | | #endif |
634 | | |
635 | 3.42k | return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT) |
636 | 3.42k | .addDef(Res) |
637 | 3.42k | .addUse(Val) |
638 | 3.42k | .addUse(Elt) |
639 | 3.42k | .addUse(Idx); |
640 | 3.42k | } |
641 | | |
642 | | MachineInstrBuilder MachineIRBuilder::buildExtractVectorElement(unsigned Res, |
643 | | unsigned Val, |
644 | 3.58k | unsigned Idx) { |
645 | | #ifndef NDEBUG |
646 | | LLT ResTy = MRI->getType(Res); |
647 | | LLT ValTy = MRI->getType(Val); |
648 | | LLT IdxTy = MRI->getType(Idx); |
649 | | assert(ValTy.isVector() && "invalid operand type"); |
650 | | assert((ResTy.isScalar() || ResTy.isPointer()) && "invalid operand type"); |
651 | | assert(IdxTy.isScalar() && "invalid operand type"); |
652 | | assert(ValTy.getElementType() == ResTy && "type mismatch"); |
653 | | #endif |
654 | | |
655 | 3.58k | return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT) |
656 | 3.58k | .addDef(Res) |
657 | 3.58k | .addUse(Val) |
658 | 3.58k | .addUse(Idx); |
659 | 3.58k | } |
660 | | |
661 | | void MachineIRBuilder::validateTruncExt(unsigned Dst, unsigned Src, |
662 | 410k | bool IsExtend) { |
663 | | #ifndef NDEBUG |
664 | | LLT SrcTy = MRI->getType(Src); |
665 | | LLT DstTy = MRI->getType(Dst); |
666 | | |
667 | | if (DstTy.isVector()) { |
668 | | assert(SrcTy.isVector() && "mismatched cast between vecot and non-vector"); |
669 | | assert(SrcTy.getNumElements() == DstTy.getNumElements() && |
670 | | "different number of elements in a trunc/ext"); |
671 | | } else |
672 | | assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc"); |
673 | | |
674 | | if (IsExtend) |
675 | | assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() && |
676 | | "invalid narrowing extend"); |
677 | | else |
678 | | assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() && |
679 | | "invalid widening trunc"); |
680 | | #endif |
681 | | } |