Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/CodeGen/RegisterScavenging.cpp
//===- RegisterScavenging.cpp - Machine register scavenging ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file implements the machine register scavenger. It can provide
/// information, such as unused registers, at any point in a machine basic
/// block. It also provides a mechanism to make registers available by evicting
/// them to spill slots.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/RegisterScavenging.h"
18
#include "llvm/ADT/ArrayRef.h"
19
#include "llvm/ADT/BitVector.h"
20
#include "llvm/ADT/SmallVector.h"
21
#include "llvm/ADT/Statistic.h"
22
#include "llvm/CodeGen/LiveRegUnits.h"
23
#include "llvm/CodeGen/MachineBasicBlock.h"
24
#include "llvm/CodeGen/MachineFrameInfo.h"
25
#include "llvm/CodeGen/MachineFunction.h"
26
#include "llvm/CodeGen/MachineFunctionPass.h"
27
#include "llvm/CodeGen/MachineInstr.h"
28
#include "llvm/CodeGen/MachineOperand.h"
29
#include "llvm/CodeGen/MachineRegisterInfo.h"
30
#include "llvm/CodeGen/TargetFrameLowering.h"
31
#include "llvm/CodeGen/TargetInstrInfo.h"
32
#include "llvm/CodeGen/TargetRegisterInfo.h"
33
#include "llvm/CodeGen/TargetSubtargetInfo.h"
34
#include "llvm/MC/MCRegisterInfo.h"
35
#include "llvm/Pass.h"
36
#include "llvm/Support/Debug.h"
37
#include "llvm/Support/ErrorHandling.h"
38
#include "llvm/Support/raw_ostream.h"
39
#include <algorithm>
40
#include <cassert>
41
#include <iterator>
42
#include <limits>
43
#include <string>
44
#include <utility>
45
46
using namespace llvm;

#define DEBUG_TYPE "reg-scavenging"

STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");

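// Illustrative sketch (not part of the original source; kept under "#if 0"):
// the scavenger is normally driven by a backward walk over a basic block,
// mirroring scavengeFrameVirtualRegsInBlock() further down in this file. The
// helper name walkBlockBackwards is made up for the example.
#if 0
static void walkBlockBackwards(RegScavenger &RS, MachineBasicBlock &MBB) {
  RS.enterBasicBlockEnd(MBB);   // Start tracking with the block's live-outs.
  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin();) {
    --I;
    // Recompute liveness for the point between *I and *std::next(I); free
    // registers can now be handed out, e.g. via scavengeRegisterBackwards().
    RS.backward(I);
  }
}
#endif
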
void RegScavenger::setRegUsed(unsigned Reg, LaneBitmask LaneMask) {
  LiveUnits.addRegMasked(Reg, LaneMask);
}

void RegScavenger::init(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();
  LiveUnits.init(*TRI);

  assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
         "Target changed?");

  // Self-initialize.
  if (!this->MBB) {
    NumRegUnits = TRI->getNumRegUnits();
    KillRegUnits.resize(NumRegUnits);
    DefRegUnits.resize(NumRegUnits);
    TmpRegUnits.resize(NumRegUnits);
  }
  this->MBB = &MBB;

  for (ScavengedInfo &SI : Scavenged) {
    SI.Reg = 0;
    SI.Restore = nullptr;
  }

  Tracking = false;
}

void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveIns(MBB);
}

void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveOuts(MBB);

  // Move the internal iterator to the last instruction of the block.
  if (MBB.begin() != MBB.end()) {
    MBBI = std::prev(MBB.end());
    Tracking = true;
  }
}

void RegScavenger::addRegUnits(BitVector &BV, unsigned Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.set(*RUI);
}

void RegScavenger::removeRegUnits(BitVector &BV, unsigned Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.reset(*RUI);
}

void RegScavenger::determineKillsAndDefs() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  MachineInstr &MI = *MBBI;
  assert(!MI.isDebugInstr() && "Debug values have no kills or defs");

  // Find out which registers are early clobbered, killed, defined, and marked
  // def-dead in this instruction.
  KillRegUnits.reset();
  DefRegUnits.reset();
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
      TmpRegUnits.clear();
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            TmpRegUnits.set(RU);
            break;
          }
        }
      }

      // Apply the mask.
      KillRegUnits |= TmpRegUnits;
    }
    if (!MO.isReg())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;

    if (MO.isUse()) {
      // Ignore undef uses.
      if (MO.isUndef())
        continue;
      if (MO.isKill())
        addRegUnits(KillRegUnits, Reg);
    } else {
      assert(MO.isDef());
      if (MO.isDead())
        addRegUnits(KillRegUnits, Reg);
      else
        addRegUnits(DefRegUnits, Reg);
    }
  }
}

void RegScavenger::unprocess() {
  assert(Tracking && "Cannot unprocess because we're not tracking");

  MachineInstr &MI = *MBBI;
  if (!MI.isDebugInstr()) {
    determineKillsAndDefs();

    // Commit the changes.
    setUnused(DefRegUnits);
    setUsed(KillRegUnits);
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}

void RegScavenger::forward() {
  // Move ptr forward.
  if (!Tracking) {
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

  for (SmallVectorImpl<ScavengedInfo>::iterator I = Scavenged.begin(),
         IE = Scavenged.end(); I != IE; ++I) {
    if (I->Restore != &MI)
      continue;

    I->Reg = 0;
    I->Restore = nullptr;
  }

  if (MI.isDebugInstr())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partial live: e.g.
        // D0 = insert_subreg undef D0, S0
        // ... D0
        // The problem is the insert_subreg could be eliminated. The use of
        // D0 is using a partially undef value. This is not *incorrect* since
        // S1 can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
        bool SubUsed = false;
        for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
          if (isRegUsed(*SubRegs)) {
            SubUsed = true;
            break;
          }
        bool SuperUsed = false;
        for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
          if (isRegUsed(*SR)) {
            SuperUsed = true;
            break;
          }
        }
        if (!SubUsed && !SuperUsed) {
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
        (void)SubUsed;
        (void)SuperUsed;
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}

void RegScavenger::backward() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  const MachineInstr &MI = *MBBI;
  LiveUnits.stepBackward(MI);

  // Expire scavenge spill frameindex uses.
  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore == &MI) {
      I.Reg = 0;
      I.Restore = nullptr;
    }
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}

bool RegScavenger::isRegUsed(unsigned Reg, bool includeReserved) const {
  if (isReserved(Reg))
    return includeReserved;
  return !LiveUnits.available(Reg);
}

unsigned RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
  for (unsigned Reg : *RC) {
    if (!isRegUsed(Reg)) {
      LLVM_DEBUG(dbgs() << "Scavenger found unused reg: " << printReg(Reg, TRI)
                        << "\n");
      return Reg;
    }
  }
  return 0;
}

BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
  BitVector Mask(TRI->getNumRegs());
  for (unsigned Reg : *RC)
    if (!isRegUsed(Reg))
      Mask.set(Reg);
  return Mask;
}

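// Illustrative sketch (not part of the original source; helper name
// pickFreeReg is made up): one way a client could ask the queries above for a
// register that is free at the current position without forcing a spill.
#if 0
static unsigned pickFreeReg(RegScavenger &RS, const TargetRegisterClass *RC) {
  // FindUnusedReg() returns 0 when every register in RC is in use.
  if (unsigned Reg = RS.FindUnusedReg(RC))
    return Reg;
  // getRegsAvailable() gives the full set; take any member, or 0 if empty.
  BitVector Free = RS.getRegsAvailable(RC);
  return Free.any() ? Free.find_first() : 0;
}
#endif
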
unsigned RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
                                       BitVector &Candidates,
                                       unsigned InstrLimit,
                                       MachineBasicBlock::iterator &UseMI) {
  int Survivor = Candidates.find_first();
  assert(Survivor > 0 && "No candidates for scavenging");

  MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
  assert(StartMI != ME && "MI already at terminator");
  MachineBasicBlock::iterator RestorePointMI = StartMI;
  MachineBasicBlock::iterator MI = StartMI;

  bool inVirtLiveRange = false;
  for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
    if (MI->isDebugInstr()) {
      ++InstrLimit; // Don't count debug instructions
      continue;
    }
    bool isVirtKillInsn = false;
    bool isVirtDefInsn = false;
    // Remove any candidates touched by instruction.
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        Candidates.clearBitsNotInMask(MO.getRegMask());
      if (!MO.isReg() || MO.isUndef() || !MO.getReg())
        continue;
      if (TargetRegisterInfo::isVirtualRegister(MO.getReg())) {
        if (MO.isDef())
          isVirtDefInsn = true;
        else if (MO.isKill())
          isVirtKillInsn = true;
        continue;
      }
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
    // If we're not in a virtual reg's live range, this is a valid
    // restore point.
    if (!inVirtLiveRange) RestorePointMI = MI;

    // Update whether we're in the live range of a virtual register
    if (isVirtKillInsn) inVirtLiveRange = false;
    if (isVirtDefInsn) inVirtLiveRange = true;

    // Was our survivor untouched by this instruction?
    if (Candidates.test(Survivor))
      continue;

    // All candidates gone?
    if (Candidates.none())
      break;

    Survivor = Candidates.find_first();
  }
  // If we ran off the end, that's where we want to restore.
  if (MI == ME) RestorePointMI = ME;
  assert(RestorePointMI != StartMI &&
         "No available scavenger restore location!");

  // We ran out of candidates, so stop the search.
  UseMI = RestorePointMI;
  return Survivor;
}

/// Given the bitvector \p Available of free register units at position
/// \p From, search backwards to find a register that is part of \p
/// Candidates and not used/clobbered until the point \p To. If there are
/// multiple candidates, continue searching and pick the one that is not
/// used/clobbered for the longest time.
/// Returns the register and the earliest position we know it to be free or
/// the position MBB.end() if no register is available.
static std::pair<MCPhysReg, MachineBasicBlock::iterator>
findSurvivorBackwards(const MachineRegisterInfo &MRI,
    MachineBasicBlock::iterator From, MachineBasicBlock::iterator To,
    const LiveRegUnits &LiveOut, ArrayRef<MCPhysReg> AllocationOrder,
    bool RestoreAfter) {
  bool FoundTo = false;
  MCPhysReg Survivor = 0;
  MachineBasicBlock::iterator Pos;
  MachineBasicBlock &MBB = *From->getParent();
  unsigned InstrLimit = 25;
  unsigned InstrCountDown = InstrLimit;
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  LiveRegUnits Used(TRI);

  for (MachineBasicBlock::iterator I = From;; --I) {
    const MachineInstr &MI = *I;

    Used.accumulate(MI);

    if (I == To) {
      // See if one of the registers in RC wasn't used so far.
      for (MCPhysReg Reg : AllocationOrder) {
        if (!MRI.isReserved(Reg) && Used.available(Reg) &&
            LiveOut.available(Reg))
          return std::make_pair(Reg, MBB.end());
      }
      // Otherwise we will continue up to InstrLimit instructions to find
      // the register which is not defined/used for the longest time.
      FoundTo = true;
      Pos = To;
      // Note: It was fine so far to start our search at From; however, now
      // that we have to spill and can only place the restore after From, add
      // the regs used/defed by std::next(From) to the set.
      if (RestoreAfter)
        Used.accumulate(*std::next(From));
    }
    if (FoundTo) {
      if (Survivor == 0 || !Used.available(Survivor)) {
        MCPhysReg AvailableReg = 0;
        for (MCPhysReg Reg : AllocationOrder) {
          if (!MRI.isReserved(Reg) && Used.available(Reg)) {
            AvailableReg = Reg;
            break;
          }
        }
        if (AvailableReg == 0)
          break;
        Survivor = AvailableReg;
      }
      if (--InstrCountDown == 0)
        break;

      // Keep searching when we find a vreg since the spilled register will
      // be useful for this other vreg as well later.
      bool FoundVReg = false;
      for (const MachineOperand &MO : MI.operands()) {
        if (MO.isReg() && TargetRegisterInfo::isVirtualRegister(MO.getReg())) {
          FoundVReg = true;
          break;
        }
      }
      if (FoundVReg) {
        InstrCountDown = InstrLimit;
        Pos = I;
      }
      if (I == MBB.begin())
        break;
    }
  }

  return std::make_pair(Survivor, Pos);
}

static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
  unsigned i = 0;
  while (!MI.getOperand(i).isFI()) {
    ++i;
    assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
  }
  return i;
}

RegScavenger::ScavengedInfo &
RegScavenger::spill(unsigned Reg, const TargetRegisterClass &RC, int SPAdj,
                    MachineBasicBlock::iterator Before,
                    MachineBasicBlock::iterator &UseMI) {
  // Find an available scavenging slot with size and alignment matching
  // the requirements of the class RC.
  const MachineFunction &MF = *Before->getMF();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned NeedSize = TRI->getSpillSize(RC);
  unsigned NeedAlign = TRI->getSpillAlignment(RC);

  unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max();
  int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
  for (unsigned I = 0; I < Scavenged.size(); ++I) {
    if (Scavenged[I].Reg != 0)
      continue;
    // Verify that this slot is valid for this register.
    int FI = Scavenged[I].FrameIndex;
    if (FI < FIB || FI >= FIE)
      continue;
    unsigned S = MFI.getObjectSize(FI);
    unsigned A = MFI.getObjectAlignment(FI);
    if (NeedSize > S || NeedAlign > A)
      continue;
    // Avoid wasting slots with large size and/or large alignment. Pick one
    // that is the best fit for this register class (in street metric).
    // Picking a larger slot than necessary could happen if a slot for a
    // larger register is reserved before a slot for a smaller one. When
    // trying to spill a smaller register, the large slot would be found
    // first, thus making it impossible to spill the larger register later.
    unsigned D = (S-NeedSize) + (A-NeedAlign);
    if (D < Diff) {
      SI = I;
      Diff = D;
    }
  }

  if (SI == Scavenged.size()) {
    // We need to scavenge a register but have no spill slot, the target
    // must know how to do it (if not, we'll assert below).
    Scavenged.push_back(ScavengedInfo(FIE));
  }

  // Avoid infinite regress
  Scavenged[SI].Reg = Reg;

  // If the target knows how to save/restore the register, let it do so;
  // otherwise, use the emergency stack spill slot.
  if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) {
    // Spill the scavenged register before \p Before.
    int FI = Scavenged[SI].FrameIndex;
    if (FI < FIB || FI >= FIE) {
      std::string Msg = std::string("Error while trying to spill ") +
          TRI->getName(Reg) + " from class " + TRI->getRegClassName(&RC) +
          ": Cannot scavenge register without an emergency spill slot!";
      report_fatal_error(Msg.c_str());
    }
    TII->storeRegToStackSlot(*MBB, Before, Reg, true, Scavenged[SI].FrameIndex,
                             &RC, TRI);
    MachineBasicBlock::iterator II = std::prev(Before);

    unsigned FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);

    // Restore the scavenged register before its use (or first terminator).
    TII->loadRegFromStackSlot(*MBB, UseMI, Reg, Scavenged[SI].FrameIndex,
                              &RC, TRI);
    II = std::prev(UseMI);

    FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
  }
  return Scavenged[SI];
}

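// Illustrative sketch (not part of the original source): spill() above needs
// an emergency spill slot to exist in the frame. Targets typically reserve one
// during frame finalization via addScavengingFrameIndex(); a rough shape of
// that reservation (helper name reserveEmergencySlot and the choice of RC are
// assumptions for the example):
#if 0
static void reserveEmergencySlot(MachineFunction &MF, RegScavenger *RS,
                                 const TargetRegisterClass &RC) {
  const TargetRegisterInfo &TRI = *MF.getSubtarget().getRegisterInfo();
  MachineFrameInfo &MFI = MF.getFrameInfo();
  // One slot big and aligned enough to hold a register of class RC.
  RS->addScavengingFrameIndex(
      MFI.CreateStackObject(TRI.getSpillSize(RC), TRI.getSpillAlignment(RC),
                            /*isSpillSlot=*/false));
}
#endif
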
unsigned RegScavenger::scavengeRegister(const TargetRegisterClass *RC,
                                        MachineBasicBlock::iterator I,
                                        int SPAdj, bool AllowSpill) {
  MachineInstr &MI = *I;
  const MachineFunction &MF = *MI.getMF();
  // Consider all allocatable registers in the register class initially
  BitVector Candidates = TRI->getAllocatableSet(MF, RC);

  // Exclude all the registers being used by the instruction.
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&
        !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
  }

  // Try to find a register that's unused if there is one, as then we won't
  // have to spill.
  BitVector Available = getRegsAvailable(RC);
  Available &= Candidates;
  if (Available.any())
    Candidates = Available;

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  unsigned SReg = findSurvivorReg(I, Candidates, 25, UseMI);

  // If we found an unused register there is no reason to spill it.
  if (!isRegUsed(SReg)) {
    LLVM_DEBUG(dbgs() << "Scavenged register: " << printReg(SReg, TRI) << "\n");
    return SReg;
  }

  if (!AllowSpill)
    return 0;

  ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI);
  Scavenged.Restore = &*std::prev(UseMI);

  LLVM_DEBUG(dbgs() << "Scavenged register (with spill): "
                    << printReg(SReg, TRI) << "\n");

  return SReg;
}

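// Illustrative sketch (not part of the original source): the forward entry
// point above is what target eliminateFrameIndex() implementations have
// traditionally reached for when they need a scratch register. The helper
// name materializeScratchReg is made up for the example.
#if 0
static unsigned materializeScratchReg(RegScavenger *RS,
                                      const TargetRegisterClass *RC,
                                      MachineBasicBlock::iterator II,
                                      int SPAdj) {
  // Returns a register of RC that is free around II, inserting an emergency
  // spill/reload via spill() when every candidate is live.
  return RS->scavengeRegister(RC, II, SPAdj, /*AllowSpill=*/true);
}
#endif
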
unsigned RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC,
                                                 MachineBasicBlock::iterator To,
                                                 bool RestoreAfter, int SPAdj,
                                                 bool AllowSpill) {
  const MachineBasicBlock &MBB = *To->getParent();
  const MachineFunction &MF = *MBB.getParent();

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF);
  std::pair<MCPhysReg, MachineBasicBlock::iterator> P =
      findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder,
                            RestoreAfter);
  MCPhysReg Reg = P.first;
  MachineBasicBlock::iterator SpillBefore = P.second;
  assert(Reg != 0 && "No register left to scavenge!");
  // Found an available register?
  if (SpillBefore == MBB.end()) {
    LLVM_DEBUG(dbgs() << "Scavenged free register: " << printReg(Reg, TRI)
               << '\n');
    return Reg;
  }

  if (!AllowSpill)
    return 0;

  MachineBasicBlock::iterator ReloadAfter =
    RestoreAfter ? std::next(MBBI) : MBBI;
  MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter);
  if (ReloadBefore != MBB.end())
    LLVM_DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
  ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore);
  Scavenged.Restore = &*std::prev(SpillBefore);
  LiveUnits.removeReg(Reg);
  LLVM_DEBUG(dbgs() << "Scavenged register with spill: " << printReg(Reg, TRI)
             << " until " << *SpillBefore);
  return Reg;
}

/// Allocate a register for the virtual register \p VReg. The last use of
/// \p VReg is around the current position of the register scavenger \p RS.
/// \p ReserveAfter controls whether the scavenged register needs to be reserved
/// after the current instruction, otherwise it will only be reserved before the
/// current instruction.
static unsigned scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS,
                             unsigned VReg, bool ReserveAfter) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
#ifndef NDEBUG
  // Verify that all definitions and uses are in the same basic block.
  const MachineBasicBlock *CommonMBB = nullptr;
  // Real definition for the reg, re-definitions are not considered.
  const MachineInstr *RealDef = nullptr;
  for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) {
    MachineBasicBlock *MBB = MO.getParent()->getParent();
    if (CommonMBB == nullptr)
      CommonMBB = MBB;
    assert(MBB == CommonMBB && "All defs+uses must be in the same basic block");
    if (MO.isDef()) {
      const MachineInstr &MI = *MO.getParent();
      if (!MI.readsRegister(VReg, &TRI)) {
        assert((!RealDef || RealDef == &MI) &&
               "Can have at most one definition which is not a redefinition");
        RealDef = &MI;
      }
    }
  }
  assert(RealDef != nullptr && "Must have at least 1 Def");
#endif

  // We should only have one definition of the register. However to accommodate
  // the requirements of two address code we also allow definitions in
  // subsequent instructions provided they also read the register. That way
  // we get a single contiguous lifetime.
  //
  // Definitions in MRI.def_begin() are unordered, search for the first.
  MachineRegisterInfo::def_iterator FirstDef =
    std::find_if(MRI.def_begin(VReg), MRI.def_end(),
                 [VReg, &TRI](const MachineOperand &MO) {
      return !MO.getParent()->readsRegister(VReg, &TRI);
    });
  assert(FirstDef != MRI.def_end() &&
         "Must have one definition that does not redefine vreg");
  MachineInstr &DefMI = *FirstDef->getParent();

  // The register scavenger will report a free register, inserting an emergency
  // spill/reload if necessary.
  int SPAdj = 0;
  const TargetRegisterClass &RC = *MRI.getRegClass(VReg);
  unsigned SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(),
                                               ReserveAfter, SPAdj);
  MRI.replaceRegWith(VReg, SReg);
  ++NumScavengedRegs;
  return SReg;
}

/// Allocate (scavenge) vregs inside a single basic block.
/// Returns true if the target spill callback created new vregs and a 2nd pass
/// is necessary.
static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI,
                                            RegScavenger &RS,
                                            MachineBasicBlock &MBB) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  RS.enterBasicBlockEnd(MBB);

  unsigned InitialNumVirtRegs = MRI.getNumVirtRegs();
  bool NextInstructionReadsVReg = false;
  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) {
    --I;
    // Move RegScavenger to the position between *I and *std::next(I).
    RS.backward(I);

    // Look for unassigned vregs in the uses of *std::next(I).
    if (NextInstructionReadsVReg) {
      MachineBasicBlock::iterator N = std::next(I);
      const MachineInstr &NMI = *N;
      for (const MachineOperand &MO : NMI.operands()) {
        if (!MO.isReg())
          continue;
        unsigned Reg = MO.getReg();
        // We only care about virtual registers and ignore virtual registers
        // created by the target callbacks in the process (those will be handled
        // in a scavenging round).
        if (!TargetRegisterInfo::isVirtualRegister(Reg) ||
            TargetRegisterInfo::virtReg2Index(Reg) >= InitialNumVirtRegs)
          continue;
        if (!MO.readsReg())
          continue;

        unsigned SReg = scavengeVReg(MRI, RS, Reg, true);
        N->addRegisterKilled(SReg, &TRI, false);
        RS.setRegUsed(SReg);
      }
    }

    // Look for unassigned vregs in the defs of *I.
    NextInstructionReadsVReg = false;
    const MachineInstr &MI = *I;
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;
      unsigned Reg = MO.getReg();
      // Only vregs, no newly created vregs (see above).
      if (!TargetRegisterInfo::isVirtualRegister(Reg) ||
          TargetRegisterInfo::virtReg2Index(Reg) >= InitialNumVirtRegs)
        continue;
      // We have to look at all operands anyway so we can precalculate here
      // whether there is a reading operand. This allows us to skip the use
      // step in the next iteration if there was none.
      assert(!MO.isInternalRead() && "Cannot assign inside bundles");
      assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
      if (MO.readsReg()) {
        NextInstructionReadsVReg = true;
      }
      if (MO.isDef()) {
        unsigned SReg = scavengeVReg(MRI, RS, Reg, false);
        I->addRegisterDead(SReg, &TRI, false);
      }
    }
  }
#ifndef NDEBUG
  for (const MachineOperand &MO : MBB.front().operands()) {
    if (!MO.isReg() || !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
      continue;
    assert(!MO.isInternalRead() && "Cannot assign inside bundles");
    assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
    assert(!MO.readsReg() && "Vreg use in first instruction not allowed");
  }
#endif

  return MRI.getNumVirtRegs() != InitialNumVirtRegs;
}

void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) {
  // FIXME: Iterating over the instruction stream is unnecessary. We can simply
  // iterate over the vreg use list, which at this point only contains machine
  // operands for which eliminateFrameIndex needs a new scratch reg.
  MachineRegisterInfo &MRI = MF.getRegInfo();
  // Shortcut.
  if (MRI.getNumVirtRegs() == 0) {
    MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
    return;
  }

  // Run through the instructions and find any virtual registers.
  for (MachineBasicBlock &MBB : MF) {
    if (MBB.empty())
      continue;

    bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
    if (Again) {
      LLVM_DEBUG(dbgs() << "Warning: Required two scavenging passes for block "
                        << MBB.getName() << '\n');
      Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
      // The target required a 2nd run (because it created new vregs while
      // spilling). Refuse to do another pass to keep compile time in check.
      if (Again)
        report_fatal_error("Incomplete scavenging after 2nd pass");
    }
  }

  MRI.clearVirtRegs();
  MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
}

namespace {

/// This class runs register scavenging independently of the
/// PrologEpilogInserter. This is used for testing.
class ScavengerTest : public MachineFunctionPass {
public:
  static char ID;

  ScavengerTest() : MachineFunctionPass(ID) {}

  bool runOnMachineFunction(MachineFunction &MF) override {
    const TargetSubtargetInfo &STI = MF.getSubtarget();
    const TargetFrameLowering &TFL = *STI.getFrameLowering();

    RegScavenger RS;
    // Let's hope that calling those outside of PrologEpilogueInserter works
    // well enough to initialize the scavenger with some emergency spill slots
    // for the target.
    BitVector SavedRegs;
    TFL.determineCalleeSaves(MF, SavedRegs, &RS);
    TFL.processFunctionBeforeFrameFinalized(MF, &RS);

    // Let's scavenge the current function.
    scavengeFrameVirtualRegs(MF, RS);
    return true;
  }
};

} // end anonymous namespace

char ScavengerTest::ID;

INITIALIZE_PASS(ScavengerTest, "scavenger-test",
                "Scavenge virtual registers inside basic blocks", false, false)