Coverage Report

Created: 2017-10-03 07:32

/Users/buildslave/jenkins/sharedspace/clang-stage2-coverage-R@2/llvm/lib/CodeGen/RegisterScavenging.cpp
Line
Count
Source (jump to first uncovered line)
1
//===- RegisterScavenging.cpp - Machine register scavenging ---------------===//
2
//
3
//                     The LLVM Compiler Infrastructure
4
//
5
// This file is distributed under the University of Illinois Open Source
6
// License. See LICENSE.TXT for details.
7
//
8
//===----------------------------------------------------------------------===//
9
//
10
/// \file
11
/// This file implements the machine register scavenger. It can provide
12
/// information, such as unused registers, at any point in a machine basic
13
/// block. It also provides a mechanism to make registers available by evicting
14
/// them to spill slots.
15
//
16
//===----------------------------------------------------------------------===//
17
18
#include "llvm/CodeGen/RegisterScavenging.h"
19
#include "llvm/ADT/ArrayRef.h"
20
#include "llvm/ADT/BitVector.h"
21
#include "llvm/ADT/SmallVector.h"
22
#include "llvm/ADT/Statistic.h"
23
#include "llvm/CodeGen/LiveRegUnits.h"
24
#include "llvm/CodeGen/MachineBasicBlock.h"
25
#include "llvm/CodeGen/MachineFrameInfo.h"
26
#include "llvm/CodeGen/MachineFunction.h"
27
#include "llvm/CodeGen/MachineFunctionPass.h"
28
#include "llvm/CodeGen/MachineInstr.h"
29
#include "llvm/CodeGen/MachineOperand.h"
30
#include "llvm/CodeGen/MachineRegisterInfo.h"
31
#include "llvm/MC/MCRegisterInfo.h"
32
#include "llvm/Pass.h"
33
#include "llvm/Support/Debug.h"
34
#include "llvm/Support/ErrorHandling.h"
35
#include "llvm/Support/raw_ostream.h"
36
#include "llvm/Target/TargetFrameLowering.h"
37
#include "llvm/Target/TargetInstrInfo.h"
38
#include "llvm/Target/TargetRegisterInfo.h"
39
#include "llvm/Target/TargetSubtargetInfo.h"
40
#include <algorithm>
41
#include <cassert>
42
#include <iterator>
43
#include <limits>
44
#include <string>
45
#include <utility>
46
47
using namespace llvm;
48
49
#define DEBUG_TYPE "reg-scavenging"
50
51
STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");
52
53
// Mark the lanes of \p Reg selected by \p LaneMask as live (in use) in the
// scavenger's liveness tracking.
void RegScavenger::setRegUsed(unsigned Reg, LaneBitmask LaneMask) {
  LiveUnits.addRegMasked(Reg, LaneMask);
}
56
57
// (Re-)initialize the scavenger for basic block \p MBB: cache target info,
// reset liveness, clear any per-slot scavenging state and stop tracking.
// The reg-unit bitvectors are allocated only on the first call (when
// this->MBB is still null).
void RegScavenger::init(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();
  LiveUnits.init(*TRI);

  assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
         "Target changed?");

  // Self-initialize.
  if (!this->MBB) {
    NumRegUnits = TRI->getNumRegUnits();
    KillRegUnits.resize(NumRegUnits);
    DefRegUnits.resize(NumRegUnits);
    TmpRegUnits.resize(NumRegUnits);
  }
  this->MBB = &MBB;

  // Forget any register/restore-point associations from a previous block;
  // the emergency spill slots themselves (FrameIndex) are kept.
  for (ScavengedInfo &SI : Scavenged) {
    SI.Reg = 0;
    SI.Restore = nullptr;
  }

  Tracking = false;
}
83
84
// Start scavenging at the top of \p MBB: seed liveness with the block's
// live-in registers. Tracking begins on the first forward() call.
void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveIns(MBB);
}
88
89
// Start scavenging at the bottom of \p MBB: seed liveness with the block's
// live-out registers and position the iterator on the last instruction.
// For an empty block Tracking stays false.
void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveOuts(MBB);

  // Move internal iterator at the last instruction of the block.
  if (MBB.begin() != MBB.end()) {
    MBBI = std::prev(MBB.end());
    Tracking = true;
  }
}
99
100
// Set the bits of all register units of \p Reg in \p BV.
void RegScavenger::addRegUnits(BitVector &BV, unsigned Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.set(*RUI);
}
104
105
// Clear the bits of all register units of \p Reg in \p BV.
void RegScavenger::removeRegUnits(BitVector &BV, unsigned Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.reset(*RUI);
}
109
110
// Populate KillRegUnits and DefRegUnits for the current instruction (*MBBI):
// units that stop being live after the instruction go into KillRegUnits
// (kills, dead defs, regmask clobbers), units that start being live go into
// DefRegUnits. Reserved registers and virtual registers are ignored.
void RegScavenger::determineKillsAndDefs() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  MachineInstr &MI = *MBBI;
  assert(!MI.isDebugValue() && "Debug values have no kills or defs");

  // Find out which registers are early clobbered, killed, defined, and marked
  // def-dead in this instruction.
  KillRegUnits.reset();
  DefRegUnits.reset();
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
      // Translate the register mask into the set of clobbered register
      // units: a unit is clobbered if any of its root registers is.
      TmpRegUnits.clear();
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            TmpRegUnits.set(RU);
            break;
          }
        }
      }

      // Apply the mask.
      KillRegUnits |= TmpRegUnits;
    }
    if (!MO.isReg())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;

    if (MO.isUse()) {
      // Ignore undef uses.
      if (MO.isUndef())
        continue;
      if (MO.isKill())
        addRegUnits(KillRegUnits, Reg);
    } else {
      assert(MO.isDef());
      if (MO.isDead())
        addRegUnits(KillRegUnits, Reg);
      else
        addRegUnits(DefRegUnits, Reg);
    }
  }
}
156
157
// Undo the most recent forward() step: killed registers become live again,
// defined registers become unused, and the iterator moves back one
// instruction (tracking stops if we were at the block's first instruction).
void RegScavenger::unprocess() {
  assert(Tracking && "Cannot unprocess because we're not tracking");

  MachineInstr &MI = *MBBI;
  if (!MI.isDebugValue()) {
    determineKillsAndDefs();

    // Commit the changes.
    setUsed(KillRegUnits);
    setUnused(DefRegUnits);
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}
175
176
// Process the next instruction: advance the internal iterator, expire any
// scavenge-slot whose restore point is this instruction, then update
// liveness with the instruction's kills and defs. Debug values are skipped.
void RegScavenger::forward() {
  // Move ptr forward.
  if (!Tracking) {
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

  // Release scavenge slots whose scavenged register is restored here.
  for (SmallVectorImpl<ScavengedInfo>::iterator I = Scavenged.begin(),
         IE = Scavenged.end(); I != IE; ++I) {
    if (I->Restore != &MI)
      continue;

    I->Reg = 0;
    I->Restore = nullptr;
  }

  if (MI.isDebugValue())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partial live: e.g.
        // D0 = insert_subreg D0<undef>, S0
        // ... D0
        // The problem is the insert_subreg could be eliminated. The use of
        // D0 is using a partially undef value. This is not *incorrect* since
        // S1 can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
        bool SubUsed = false;
        for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
          if (isRegUsed(*SubRegs)) {
            SubUsed = true;
            break;
          }
        bool SuperUsed = false;
        for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
          if (isRegUsed(*SR)) {
            SuperUsed = true;
            break;
          }
        }
        if (!SubUsed && !SuperUsed) {
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
        (void)SubUsed;
        (void)SuperUsed;
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}
260
261
// Step the scavenger one instruction backwards: roll liveness back across
// *MBBI, expire scavenge slots restored at this instruction, and move the
// iterator to the previous instruction (tracking stops at the block start).
void RegScavenger::backward() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  const MachineInstr &MI = *MBBI;
  LiveUnits.stepBackward(MI);

  // Expire scavenge spill frameindex uses.
  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore == &MI) {
      I.Reg = 0;
      I.Restore = nullptr;
    }
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}
281
282
2.66k
bool RegScavenger::isRegUsed(unsigned Reg, bool includeReserved) const {
283
2.66k
  if (isReserved(Reg))
284
468
    return includeReserved;
285
2.19k
  return !LiveUnits.available(Reg);
286
2.19k
}
287
288
// Return the first register of class \p RC that is not currently in use,
// or 0 if every register in the class is used.
unsigned RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
  for (unsigned Reg : *RC) {
    if (!isRegUsed(Reg)) {
      DEBUG(dbgs() << "Scavenger found unused reg: " << TRI->getName(Reg) <<
            "\n");
      return Reg;
    }
  }
  return 0;
}
298
299
66
BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
300
66
  BitVector Mask(TRI->getNumRegs());
301
66
  for (unsigned Reg : *RC)
302
2.33k
    
if (2.33k
!isRegUsed(Reg)2.33k
)
303
1.54k
      Mask.set(Reg);
304
66
  return Mask;
305
66
}
306
307
// Scan forward from \p StartMI (up to \p InstrLimit instructions) and pick
// the candidate register that stays untouched for the longest time; \p UseMI
// is set to the latest safe restore point (outside any virtual register's
// live range). Candidates touched along the way are removed from the set.
unsigned RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
                                       BitVector &Candidates,
                                       unsigned InstrLimit,
                                       MachineBasicBlock::iterator &UseMI) {
  int Survivor = Candidates.find_first();
  assert(Survivor > 0 && "No candidates for scavenging");

  MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
  assert(StartMI != ME && "MI already at terminator");
  MachineBasicBlock::iterator RestorePointMI = StartMI;
  MachineBasicBlock::iterator MI = StartMI;

  bool inVirtLiveRange = false;
  for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
    if (MI->isDebugValue()) {
      ++InstrLimit; // Don't count debug instructions
      continue;
    }
    bool isVirtKillInsn = false;
    bool isVirtDefInsn = false;
    // Remove any candidates touched by instruction.
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        Candidates.clearBitsNotInMask(MO.getRegMask());
      if (!MO.isReg() || MO.isUndef() || !MO.getReg())
        continue;
      if (TargetRegisterInfo::isVirtualRegister(MO.getReg())) {
        if (MO.isDef())
          isVirtDefInsn = true;
        else if (MO.isKill())
          isVirtKillInsn = true;
        continue;
      }
      // Physical register operand: drop it and all aliases from the
      // candidate set.
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
    // If we're not in a virtual reg's live range, this is a valid
    // restore point.
    if (!inVirtLiveRange) RestorePointMI = MI;

    // Update whether we're in the live range of a virtual register
    if (isVirtKillInsn) inVirtLiveRange = false;
    if (isVirtDefInsn) inVirtLiveRange = true;

    // Was our survivor untouched by this instruction?
    if (Candidates.test(Survivor))
      continue;

    // All candidates gone?
    if (Candidates.none())
      break;

    Survivor = Candidates.find_first();
  }
  // If we ran off the end, that's where we want to restore.
  if (MI == ME) RestorePointMI = ME;
  assert(RestorePointMI != StartMI &&
         "No available scavenger restore location!");

  // We ran out of candidates, so stop the search.
  UseMI = RestorePointMI;
  return Survivor;
}
370
371
/// Given the bitvector \p Available of free register units at position
372
/// \p From. Search backwards to find a register that is part of \p
373
/// Candidates and not used/clobbered until the point \p To. If there is
374
/// multiple candidates continue searching and pick the one that is not used/
375
/// clobbered for the longest time.
376
/// Returns the register and the earliest position we know it to be free or
377
/// the position MBB.end() if no register is available.
378
static std::pair<MCPhysReg, MachineBasicBlock::iterator>
379
findSurvivorBackwards(const MachineRegisterInfo &MRI,
380
    MachineBasicBlock::iterator From, MachineBasicBlock::iterator To,
381
    const LiveRegUnits &LiveOut, ArrayRef<MCPhysReg> AllocationOrder,
382
6.83k
    bool RestoreAfter) {
383
6.83k
  bool FoundTo = false;
384
6.83k
  MCPhysReg Survivor = 0;
385
6.83k
  MachineBasicBlock::iterator Pos;
386
6.83k
  MachineBasicBlock &MBB = *From->getParent();
387
6.83k
  unsigned InstrLimit = 25;
388
6.83k
  unsigned InstrCountDown = InstrLimit;
389
6.83k
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
390
6.83k
  LiveRegUnits Used(TRI);
391
6.83k
392
6.83k
  for (MachineBasicBlock::iterator I = From;; 
--I697
) {
393
7.52k
    const MachineInstr &MI = *I;
394
7.52k
395
7.52k
    Used.accumulate(MI);
396
7.52k
397
7.52k
    if (
I == To7.52k
) {
398
6.83k
      // See if one of the registers in RC wasn't used so far.
399
12.8k
      for (MCPhysReg Reg : AllocationOrder) {
400
12.8k
        if (
!MRI.isReserved(Reg) && 12.8k
Used.available(Reg)12.1k
&&
401
12.1k
            LiveOut.available(Reg))
402
6.80k
          return std::make_pair(Reg, MBB.end());
403
23
      }
404
23
      // Otherwise we will continue up to InstrLimit instructions to find
405
23
      // the register which is not defined/used for the longest time.
406
23
      FoundTo = true;
407
23
      Pos = To;
408
23
      // Note: It was fine so far to start our search at From, however now that
409
23
      // we have to spill, and can only place the restore after From then
410
23
      // add the regs used/defed by std::next(From) to the set.
411
23
      if (RestoreAfter)
412
23
        Used.accumulate(*std::next(From));
413
6.83k
    }
414
720
    
if (720
FoundTo720
) {
415
365
      if (
Survivor == 0 || 365
!Used.available(Survivor)342
) {
416
48
        MCPhysReg AvilableReg = 0;
417
287
        for (MCPhysReg Reg : AllocationOrder) {
418
287
          if (
!MRI.isReserved(Reg) && 287
Used.available(Reg)262
) {
419
39
            AvilableReg = Reg;
420
39
            break;
421
39
          }
422
48
        }
423
48
        if (AvilableReg == 0)
424
9
          break;
425
39
        Survivor = AvilableReg;
426
39
      }
427
356
      
if (356
--InstrCountDown == 0356
)
428
4
        break;
429
352
430
352
      // Keep searching when we find a vreg since the spilled register will
431
352
      // be usefull for this other vreg as well later.
432
352
      bool FoundVReg = false;
433
905
      for (const MachineOperand &MO : MI.operands()) {
434
905
        if (
MO.isReg() && 905
TargetRegisterInfo::isVirtualRegister(MO.getReg())683
) {
435
45
          FoundVReg = true;
436
45
          break;
437
45
        }
438
352
      }
439
352
      if (
FoundVReg352
) {
440
45
        InstrCountDown = InstrLimit;
441
45
        Pos = I;
442
45
      }
443
352
      if (I == MBB.begin())
444
10
        break;
445
365
    }
446
7.52k
  }
447
6.83k
448
23
  return std::make_pair(Survivor, Pos);
449
6.83k
}
450
451
48
static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
452
48
  unsigned i = 0;
453
100
  while (
!MI.getOperand(i).isFI()100
) {
454
52
    ++i;
455
52
    assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
456
52
  }
457
48
  return i;
458
48
}
459
460
// Spill \p Reg to an emergency scavenging slot so it can be handed out as a
// scratch register, inserting the store before \p Before and the reload
// before \p UseMI. Picks the tightest-fitting free slot; lets the target
// save/restore the register itself if it knows how. Returns the slot's
// bookkeeping entry (with .Reg already claimed).
RegScavenger::ScavengedInfo &
RegScavenger::spill(unsigned Reg, const TargetRegisterClass &RC, int SPAdj,
                    MachineBasicBlock::iterator Before,
                    MachineBasicBlock::iterator &UseMI) {
  // Find an available scavenging slot with size and alignment matching
  // the requirements of the class RC.
  const MachineFunction &MF = *Before->getParent()->getParent();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned NeedSize = TRI->getSpillSize(RC);
  unsigned NeedAlign = TRI->getSpillAlignment(RC);

  unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max();
  int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
  for (unsigned I = 0; I < Scavenged.size(); ++I) {
    if (Scavenged[I].Reg != 0)
      continue;
    // Verify that this slot is valid for this register.
    int FI = Scavenged[I].FrameIndex;
    if (FI < FIB || FI >= FIE)
      continue;
    unsigned S = MFI.getObjectSize(FI);
    unsigned A = MFI.getObjectAlignment(FI);
    if (NeedSize > S || NeedAlign > A)
      continue;
    // Avoid wasting slots with large size and/or large alignment. Pick one
    // that is the best fit for this register class (in street metric).
    // Picking a larger slot than necessary could happen if a slot for a
    // larger register is reserved before a slot for a smaller one. When
    // trying to spill a smaller register, the large slot would be found
    // first, thus making it impossible to spill the larger register later.
    unsigned D = (S-NeedSize) + (A-NeedAlign);
    if (D < Diff) {
      SI = I;
      Diff = D;
    }
  }

  if (SI == Scavenged.size()) {
    // We need to scavenge a register but have no spill slot, the target
    // must know how to do it (if not, we'll assert below).
    Scavenged.push_back(ScavengedInfo(FIE));
  }

  // Avoid infinite regress
  Scavenged[SI].Reg = Reg;

  // If the target knows how to save/restore the register, let it do so;
  // otherwise, use the emergency stack spill slot.
  if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) {
    // Spill the scavenged register before \p Before.
    int FI = Scavenged[SI].FrameIndex;
    if (FI < FIB || FI >= FIE) {
      std::string Msg = std::string("Error while trying to spill ") +
          TRI->getName(Reg) + " from class " + TRI->getRegClassName(&RC) +
          ": Cannot scavenge register without an emergency spill slot!";
      report_fatal_error(Msg.c_str());
    }
    TII->storeRegToStackSlot(*MBB, Before, Reg, true, Scavenged[SI].FrameIndex,
                             &RC, TRI);
    MachineBasicBlock::iterator II = std::prev(Before);

    // The just-inserted store references the slot's frame index; resolve it
    // to a concrete stack address now.
    unsigned FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);

    // Restore the scavenged register before its use (or first terminator).
    TII->loadRegFromStackSlot(*MBB, UseMI, Reg, Scavenged[SI].FrameIndex,
                              &RC, TRI);
    II = std::prev(UseMI);

    FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
  }
  return Scavenged[SI];
}
534
535
// Make a register of class \p RC available at instruction \p I, spilling a
// live register to an emergency slot if no free one exists. Returns the
// scavenged register.
unsigned RegScavenger::scavengeRegister(const TargetRegisterClass *RC,
                                        MachineBasicBlock::iterator I,
                                        int SPAdj) {
  MachineInstr &MI = *I;
  const MachineFunction &MF = *MI.getParent()->getParent();
  // Consider all allocatable registers in the register class initially
  BitVector Candidates = TRI->getAllocatableSet(MF, RC);

  // Exclude all the registers being used by the instruction.
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&
        !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
  }

  // Try to find a register that's unused if there is one, as then we won't
  // have to spill.
  BitVector Available = getRegsAvailable(RC);
  Available &= Candidates;
  if (Available.any())
    Candidates = Available;

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  unsigned SReg = findSurvivorReg(I, Candidates, 25, UseMI);

  // If we found an unused register there is no reason to spill it.
  if (!isRegUsed(SReg)) {
    DEBUG(dbgs() << "Scavenged register: " << TRI->getName(SReg) << "\n");
    return SReg;
  }

  // Otherwise spill it and arrange the restore right before its next use.
  ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI);
  Scavenged.Restore = &*std::prev(UseMI);

  DEBUG(dbgs() << "Scavenged register (with spill): " << TRI->getName(SReg) <<
        "\n");

  return SReg;
}
576
577
// Backwards variant of scavengeRegister(): find a register of class \p RC
// that is free from the current position back to \p To, spilling one with a
// spill/reload pair if necessary. \p RestoreAfter places the reload after
// the current instruction. Returns the scavenged register.
unsigned RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC,
                                                 MachineBasicBlock::iterator To,
                                                 bool RestoreAfter, int SPAdj) {
  const MachineBasicBlock &MBB = *To->getParent();
  const MachineFunction &MF = *MBB.getParent();

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF);
  std::pair<MCPhysReg, MachineBasicBlock::iterator> P =
      findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder,
                            RestoreAfter);
  MCPhysReg Reg = P.first;
  MachineBasicBlock::iterator SpillBefore = P.second;
  assert(Reg != 0 && "No register left to scavenge!");
  // Found an available register?  (MBB.end() means no spill needed.)
  if (SpillBefore != MBB.end()) {
    MachineBasicBlock::iterator ReloadAfter =
      RestoreAfter ? std::next(MBBI) : MBBI;
    MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter);
    DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
    ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore);
    Scavenged.Restore = &*std::prev(SpillBefore);
    LiveUnits.removeReg(Reg);
    DEBUG(dbgs() << "Scavenged register with spill: " << PrintReg(Reg, TRI)
          << " until " << *SpillBefore);
  } else {
    DEBUG(dbgs() << "Scavenged free register: " << PrintReg(Reg, TRI) << '\n');
  }
  return Reg;
}
608
609
/// Allocate a register for the virtual register \p VReg. The last use of
/// \p VReg is around the current position of the register scavenger \p RS.
/// \p ReserveAfter controls whether the scavenged register needs to be reserved
/// after the current instruction, otherwise it will only be reserved before the
/// current instruction.
static unsigned scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS,
                             unsigned VReg, bool ReserveAfter) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
#ifndef NDEBUG
  // Verify that all definitions and uses are in the same basic block.
  const MachineBasicBlock *CommonMBB = nullptr;
  // Real definition for the reg, re-definitions are not considered.
  const MachineInstr *RealDef = nullptr;
  for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) {
    MachineBasicBlock *MBB = MO.getParent()->getParent();
    if (CommonMBB == nullptr)
      CommonMBB = MBB;
    assert(MBB == CommonMBB && "All defs+uses must be in the same basic block");
    if (MO.isDef()) {
      const MachineInstr &MI = *MO.getParent();
      if (!MI.readsRegister(VReg, &TRI)) {
        assert((!RealDef || RealDef == &MI) &&
               "Can have at most one definition which is not a redefinition");
        RealDef = &MI;
      }
    }
  }
  assert(RealDef != nullptr && "Must have at least 1 Def");
#endif

  // We should only have one definition of the register. However to accommodate
  // the requirements of two address code we also allow definitions in
  // subsequent instructions provided they also read the register. That way
  // we get a single contiguous lifetime.
  //
  // Definitions in MRI.def_begin() are unordered, search for the first.
  MachineRegisterInfo::def_iterator FirstDef =
    std::find_if(MRI.def_begin(VReg), MRI.def_end(),
                 [VReg, &TRI](const MachineOperand &MO) {
      return !MO.getParent()->readsRegister(VReg, &TRI);
    });
  assert(FirstDef != MRI.def_end() &&
         "Must have one definition that does not redefine vreg");
  MachineInstr &DefMI = *FirstDef->getParent();

  // The register scavenger will report a free register inserting an emergency
  // spill/reload if necessary.
  int SPAdj = 0;
  const TargetRegisterClass &RC = *MRI.getRegClass(VReg);
  unsigned SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(),
                                               ReserveAfter, SPAdj);
  // Rewrite every occurrence of VReg to the scavenged physical register.
  MRI.replaceRegWith(VReg, SReg);
  ++NumScavengedRegs;
  return SReg;
}
664
665
/// Allocate (scavenge) vregs inside a single basic block.
/// Walks the block bottom-up alongside the scavenger's liveness tracking.
/// Returns true if the target spill callback created new vregs and a 2nd pass
/// is necessary.
static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI,
                                            RegScavenger &RS,
                                            MachineBasicBlock &MBB) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  RS.enterBasicBlockEnd(MBB);

  unsigned InitialNumVirtRegs = MRI.getNumVirtRegs();
  bool NextInstructionReadsVReg = false;
  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) {
    --I;
    // Move RegScavenger to the position between *I and *std::next(I).
    RS.backward(I);

    // Look for unassigned vregs in the uses of *std::next(I).
    if (NextInstructionReadsVReg) {
      MachineBasicBlock::iterator N = std::next(I);
      const MachineInstr &NMI = *N;
      for (const MachineOperand &MO : NMI.operands()) {
        if (!MO.isReg())
          continue;
        unsigned Reg = MO.getReg();
        // We only care about virtual registers and ignore virtual registers
        // created by the target callbacks in the process (those will be handled
        // in a scavenging round).
        if (!TargetRegisterInfo::isVirtualRegister(Reg) ||
            TargetRegisterInfo::virtReg2Index(Reg) >= InitialNumVirtRegs)
          continue;
        if (!MO.readsReg())
          continue;

        unsigned SReg = scavengeVReg(MRI, RS, Reg, true);
        N->addRegisterKilled(SReg, &TRI, false);
        RS.setRegUsed(SReg);
      }
    }

    // Look for unassigned vregs in the defs of *I.
    NextInstructionReadsVReg = false;
    const MachineInstr &MI = *I;
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;
      unsigned Reg = MO.getReg();
      // Only vregs, no newly created vregs (see above).
      if (!TargetRegisterInfo::isVirtualRegister(Reg) ||
          TargetRegisterInfo::virtReg2Index(Reg) >= InitialNumVirtRegs)
        continue;
      // We have to look at all operands anyway so we can precalculate here
      // whether there is a reading operand. This allows us to skip the use
      // step in the next iteration if there was none.
      assert(!MO.isInternalRead() && "Cannot assign inside bundles");
      assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
      if (MO.readsReg()) {
        NextInstructionReadsVReg = true;
      }
      if (MO.isDef()) {
        unsigned SReg = scavengeVReg(MRI, RS, Reg, false);
        I->addRegisterDead(SReg, &TRI, false);
      }
    }
  }
#ifndef NDEBUG
  // The first instruction of the block must not read any of the original
  // vregs; they were all assigned while walking upwards.
  for (const MachineOperand &MO : MBB.front().operands()) {
    if (!MO.isReg() || !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
      continue;
    assert(!MO.isInternalRead() && "Cannot assign inside bundles");
    assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
    assert(!MO.readsReg() && "Vreg use in first instruction not allowed");
  }
#endif

  return MRI.getNumVirtRegs() != InitialNumVirtRegs;
}
741
742
// Replace every remaining virtual register in \p MF with a scavenged
// physical register (at most two scavenging passes per block), then clear
// virtual registers and mark the function NoVRegs.
void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) {
  // FIXME: Iterating over the instruction stream is unnecessary. We can simply
  // iterate over the vreg use list, which at this point only contains machine
  // operands for which eliminateFrameIndex need a new scratch reg.
  MachineRegisterInfo &MRI = MF.getRegInfo();
  // Shortcut.
  if (MRI.getNumVirtRegs() == 0) {
    MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
    return;
  }

  // Run through the instructions and find any virtual registers.
  for (MachineBasicBlock &MBB : MF) {
    if (MBB.empty())
      continue;

    bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
    if (Again) {
      DEBUG(dbgs() << "Warning: Required two scavenging passes for block "
            << MBB.getName() << '\n');
      Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
      // The target required a 2nd run (because it created new vregs while
      // spilling). Refuse to do another pass to keep compiletime in check.
      if (Again)
        report_fatal_error("Incomplete scavenging after 2nd pass");
    }
  }

  MRI.clearVirtRegs();
  MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
}
773
774
namespace {
775
776
/// This class runs register scavenging independent of the
/// PrologEpilogInserter. This is used for testing.
class ScavengerTest : public MachineFunctionPass {
public:
  static char ID;

  ScavengerTest() : MachineFunctionPass(ID) {}

  bool runOnMachineFunction(MachineFunction &MF) override {
    const TargetSubtargetInfo &STI = MF.getSubtarget();
    const TargetFrameLowering &TFL = *STI.getFrameLowering();

    RegScavenger RS;
    // Let's hope that calling those outside of PrologEpilogueInserter works
    // well enough to initialize the scavenger with some emergency spillslots
    // for the target.
    BitVector SavedRegs;
    TFL.determineCalleeSaves(MF, SavedRegs, &RS);
    TFL.processFunctionBeforeFrameFinalized(MF, &RS);

    // Let's scavenge the current function
    scavengeFrameVirtualRegs(MF, RS);
    return true;
  }
};
801
802
} // end anonymous namespace
803
804
char ScavengerTest::ID;

// Register the test pass so it can be run by name (e.g. "scavenger-test").
INITIALIZE_PASS(ScavengerTest, "scavenger-test",
                "Scavenge virtual registers inside basic blocks", false, false)