/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/CodeGen/SjLjEHPrepare.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | //===- SjLjEHPrepare.cpp - Eliminate Invoke & Unwind instructions ---------===// |
2 | | // |
3 | | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | | // See https://llvm.org/LICENSE.txt for license information. |
5 | | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | | // |
7 | | //===----------------------------------------------------------------------===// |
8 | | // |
9 | | // This transformation is designed for use by code generators which use SjLj |
10 | | // based exception handling. |
11 | | // |
12 | | //===----------------------------------------------------------------------===// |
13 | | |
14 | | #include "llvm/ADT/SetVector.h" |
15 | | #include "llvm/ADT/SmallPtrSet.h" |
16 | | #include "llvm/ADT/SmallVector.h" |
17 | | #include "llvm/ADT/Statistic.h" |
18 | | #include "llvm/Transforms/Utils/Local.h" |
19 | | #include "llvm/CodeGen/Passes.h" |
20 | | #include "llvm/IR/Constants.h" |
21 | | #include "llvm/IR/DataLayout.h" |
22 | | #include "llvm/IR/DerivedTypes.h" |
23 | | #include "llvm/IR/IRBuilder.h" |
24 | | #include "llvm/IR/Instructions.h" |
25 | | #include "llvm/IR/Intrinsics.h" |
26 | | #include "llvm/IR/Module.h" |
27 | | #include "llvm/Pass.h" |
28 | | #include "llvm/Support/Debug.h" |
29 | | #include "llvm/Support/raw_ostream.h" |
30 | | using namespace llvm; |
31 | | |
32 | | #define DEBUG_TYPE "sjljehprepare" |
33 | | |
34 | | STATISTIC(NumInvokes, "Number of invokes replaced"); |
35 | | STATISTIC(NumSpilled, "Number of registers live across unwind edges"); |
36 | | |
37 | | namespace { |
38 | | class SjLjEHPrepare : public FunctionPass { |
39 | | Type *doubleUnderDataTy; |
40 | | Type *doubleUnderJBufTy; |
41 | | Type *FunctionContextTy; |
42 | | FunctionCallee RegisterFn; |
43 | | FunctionCallee UnregisterFn; |
44 | | Function *BuiltinSetupDispatchFn; |
45 | | Function *FrameAddrFn; |
46 | | Function *StackAddrFn; |
47 | | Function *StackRestoreFn; |
48 | | Function *LSDAAddrFn; |
49 | | Function *CallSiteFn; |
50 | | Function *FuncCtxFn; |
51 | | AllocaInst *FuncCtx; |
52 | | |
53 | | public: |
54 | | static char ID; // Pass identification, replacement for typeid |
55 | 1.72k | explicit SjLjEHPrepare() : FunctionPass(ID) {} |
56 | | bool doInitialization(Module &M) override; |
57 | | bool runOnFunction(Function &F) override; |
58 | | |
59 | 1.72k | void getAnalysisUsage(AnalysisUsage &AU) const override {} |
60 | 8.31k | StringRef getPassName() const override { |
61 | 8.31k | return "SJLJ Exception Handling preparation"; |
62 | 8.31k | } |
63 | | |
64 | | private: |
65 | | bool setupEntryBlockAndCallSites(Function &F); |
66 | | void substituteLPadValues(LandingPadInst *LPI, Value *ExnVal, Value *SelVal); |
67 | | Value *setupFunctionContext(Function &F, ArrayRef<LandingPadInst *> LPads); |
68 | | void lowerIncomingArguments(Function &F); |
69 | | void lowerAcrossUnwindEdges(Function &F, ArrayRef<InvokeInst *> Invokes); |
70 | | void insertCallSiteStore(Instruction *I, int Number); |
71 | | }; |
72 | | } // end anonymous namespace |
73 | | |
74 | | char SjLjEHPrepare::ID = 0; |
75 | | INITIALIZE_PASS(SjLjEHPrepare, DEBUG_TYPE, "Prepare SjLj exceptions", |
76 | | false, false) |
77 | | |
78 | | // Public Interface To the SjLjEHPrepare pass. |
79 | 1.72k | FunctionPass *llvm::createSjLjEHPreparePass() { return new SjLjEHPrepare(); } |
80 | | // doInitialization - Set up declarations and types needed to process
81 | | // exceptions. |
82 | 1.72k | bool SjLjEHPrepare::doInitialization(Module &M) { |
83 | 1.72k | // Build the function context structure. |
84 | 1.72k | // builtin_setjmp uses a five word jbuf |
85 | 1.72k | Type *VoidPtrTy = Type::getInt8PtrTy(M.getContext()); |
86 | 1.72k | Type *Int32Ty = Type::getInt32Ty(M.getContext()); |
87 | 1.72k | doubleUnderDataTy = ArrayType::get(Int32Ty, 4); |
88 | 1.72k | doubleUnderJBufTy = ArrayType::get(VoidPtrTy, 5); |
89 | 1.72k | FunctionContextTy = StructType::get(VoidPtrTy, // __prev |
90 | 1.72k | Int32Ty, // call_site |
91 | 1.72k | doubleUnderDataTy, // __data |
92 | 1.72k | VoidPtrTy, // __personality |
93 | 1.72k | VoidPtrTy, // __lsda |
94 | 1.72k | doubleUnderJBufTy // __jbuf |
95 | 1.72k | ); |
96 | 1.72k | |
97 | 1.72k | return true; |
98 | 1.72k | } |
99 | | |
100 | | /// insertCallSiteStore - Insert a store of the call-site value to the |
101 | | /// function context |
102 | 964 | void SjLjEHPrepare::insertCallSiteStore(Instruction *I, int Number) { |
103 | 964 | IRBuilder<> Builder(I); |
104 | 964 | |
105 | 964 | // Get a reference to the call_site field. |
106 | 964 | Type *Int32Ty = Type::getInt32Ty(I->getContext()); |
107 | 964 | Value *Zero = ConstantInt::get(Int32Ty, 0); |
108 | 964 | Value *One = ConstantInt::get(Int32Ty, 1); |
109 | 964 | Value *Idxs[2] = { Zero, One }; |
110 | 964 | Value *CallSite = |
111 | 964 | Builder.CreateGEP(FunctionContextTy, FuncCtx, Idxs, "call_site"); |
112 | 964 | |
113 | 964 | // Insert a store of the call-site number |
114 | 964 | ConstantInt *CallSiteNoC = |
115 | 964 | ConstantInt::get(Type::getInt32Ty(I->getContext()), Number); |
116 | 964 | Builder.CreateStore(CallSiteNoC, CallSite, true /*volatile*/); |
117 | 964 | } |
118 | | |
119 | | /// MarkBlocksLiveIn - Insert BB and all of its predecessors into LiveBBs until |
120 | | /// we reach blocks we've already seen. |
121 | | static void MarkBlocksLiveIn(BasicBlock *BB, |
122 | 612 | SmallPtrSetImpl<BasicBlock *> &LiveBBs) { |
123 | 612 | if (!LiveBBs.insert(BB).second) |
124 | 322 | return; // already been here. |
125 | 290 | |
126 | 290 | df_iterator_default_set<BasicBlock*> Visited; |
127 | 290 | |
128 | 290 | for (BasicBlock *B : inverse_depth_first_ext(BB, Visited)) |
129 | 3.72k | LiveBBs.insert(B); |
130 | 290 | |
131 | 290 | } |
132 | | |
133 | | /// substituteLPadValues - Substitute the values returned by the landingpad |
134 | | /// instruction with those returned by the personality function. |
135 | | void SjLjEHPrepare::substituteLPadValues(LandingPadInst *LPI, Value *ExnVal, |
136 | 131 | Value *SelVal) { |
137 | 131 | SmallVector<Value *, 8> UseWorkList(LPI->user_begin(), LPI->user_end()); |
138 | 341 | while (!UseWorkList.empty()) { |
139 | 210 | Value *Val = UseWorkList.pop_back_val(); |
140 | 210 | auto *EVI = dyn_cast<ExtractValueInst>(Val); |
141 | 210 | if (!EVI) |
142 | 9 | continue; |
143 | 201 | if (EVI->getNumIndices() != 1) |
144 | 0 | continue; |
145 | 201 | if (*EVI->idx_begin() == 0) |
146 | 106 | EVI->replaceAllUsesWith(ExnVal); |
147 | 95 | else if (*EVI->idx_begin() == 1) |
148 | 95 | EVI->replaceAllUsesWith(SelVal); |
149 | 201 | if (EVI->use_empty()) |
150 | 201 | EVI->eraseFromParent(); |
151 | 201 | } |
152 | 131 | |
153 | 131 | if (LPI->use_empty()) |
154 | 122 | return; |
155 | 9 | |
156 | 9 | // There are still some uses of LPI. Construct an aggregate with the exception |
157 | 9 | // values and replace the LPI with that aggregate. |
158 | 9 | Type *LPadType = LPI->getType(); |
159 | 9 | Value *LPadVal = UndefValue::get(LPadType); |
160 | 9 | auto *SelI = cast<Instruction>(SelVal); |
161 | 9 | IRBuilder<> Builder(SelI->getParent(), std::next(SelI->getIterator())); |
162 | 9 | LPadVal = Builder.CreateInsertValue(LPadVal, ExnVal, 0, "lpad.val"); |
163 | 9 | LPadVal = Builder.CreateInsertValue(LPadVal, SelVal, 1, "lpad.val"); |
164 | 9 | |
165 | 9 | LPI->replaceAllUsesWith(LPadVal); |
166 | 9 | } |
167 | | |
168 | | /// setupFunctionContext - Allocate the function context on the stack and fill |
169 | | /// it with all of the data that we know at this point. |
170 | | Value *SjLjEHPrepare::setupFunctionContext(Function &F, |
171 | 39 | ArrayRef<LandingPadInst *> LPads) { |
172 | 39 | BasicBlock *EntryBB = &F.front(); |
173 | 39 | |
174 | 39 | // Create an alloca for the incoming jump buffer ptr and the new jump buffer |
175 | 39 | // that needs to be restored on all exits from the function. This is an alloca |
176 | 39 | // because the value needs to be added to the global context list. |
177 | 39 | auto &DL = F.getParent()->getDataLayout(); |
178 | 39 | unsigned Align = DL.getPrefTypeAlignment(FunctionContextTy); |
179 | 39 | FuncCtx = new AllocaInst(FunctionContextTy, DL.getAllocaAddrSpace(), |
180 | 39 | nullptr, Align, "fn_context", &EntryBB->front()); |
181 | 39 | |
182 | 39 | // Fill in the function context structure. |
183 | 131 | for (LandingPadInst *LPI : LPads) { |
184 | 131 | IRBuilder<> Builder(LPI->getParent(), |
185 | 131 | LPI->getParent()->getFirstInsertionPt()); |
186 | 131 | |
187 | 131 | // Reference the __data field. |
188 | 131 | Value *FCData = |
189 | 131 | Builder.CreateConstGEP2_32(FunctionContextTy, FuncCtx, 0, 2, "__data"); |
190 | 131 | |
191 | 131 | // The exception values come back in context->__data[0]. |
192 | 131 | Type *Int32Ty = Type::getInt32Ty(F.getContext()); |
193 | 131 | Value *ExceptionAddr = Builder.CreateConstGEP2_32(doubleUnderDataTy, FCData, |
194 | 131 | 0, 0, "exception_gep"); |
195 | 131 | Value *ExnVal = Builder.CreateLoad(Int32Ty, ExceptionAddr, true, "exn_val"); |
196 | 131 | ExnVal = Builder.CreateIntToPtr(ExnVal, Builder.getInt8PtrTy()); |
197 | 131 | |
198 | 131 | Value *SelectorAddr = Builder.CreateConstGEP2_32(doubleUnderDataTy, FCData, |
199 | 131 | 0, 1, "exn_selector_gep"); |
200 | 131 | Value *SelVal = |
201 | 131 | Builder.CreateLoad(Int32Ty, SelectorAddr, true, "exn_selector_val"); |
202 | 131 | |
203 | 131 | substituteLPadValues(LPI, ExnVal, SelVal); |
204 | 131 | } |
205 | 39 | |
206 | 39 | // Personality function |
207 | 39 | IRBuilder<> Builder(EntryBB->getTerminator()); |
208 | 39 | Value *PersonalityFn = F.getPersonalityFn(); |
209 | 39 | Value *PersonalityFieldPtr = Builder.CreateConstGEP2_32( |
210 | 39 | FunctionContextTy, FuncCtx, 0, 3, "pers_fn_gep"); |
211 | 39 | Builder.CreateStore( |
212 | 39 | Builder.CreateBitCast(PersonalityFn, Builder.getInt8PtrTy()), |
213 | 39 | PersonalityFieldPtr, /*isVolatile=*/true); |
214 | 39 | |
215 | 39 | // LSDA address |
216 | 39 | Value *LSDA = Builder.CreateCall(LSDAAddrFn, {}, "lsda_addr"); |
217 | 39 | Value *LSDAFieldPtr = |
218 | 39 | Builder.CreateConstGEP2_32(FunctionContextTy, FuncCtx, 0, 4, "lsda_gep"); |
219 | 39 | Builder.CreateStore(LSDA, LSDAFieldPtr, /*isVolatile=*/true); |
220 | 39 | |
221 | 39 | return FuncCtx; |
222 | 39 | } |
223 | | |
224 | | /// lowerIncomingArguments - To avoid having to handle incoming arguments |
225 | | /// specially, we lower each arg to a copy instruction in the entry block. This |
226 | | /// ensures that the argument value itself cannot be live out of the entry |
227 | | /// block. |
228 | 39 | void SjLjEHPrepare::lowerIncomingArguments(Function &F) { |
229 | 39 | BasicBlock::iterator AfterAllocaInsPt = F.begin()->begin(); |
230 | 778 | while (isa<AllocaInst>(AfterAllocaInsPt) && |
231 | 778 | cast<AllocaInst>(AfterAllocaInsPt)->isStaticAlloca()739 ) |
232 | 739 | ++AfterAllocaInsPt; |
233 | 39 | assert(AfterAllocaInsPt != F.front().end()); |
234 | 39 | |
235 | 42 | for (auto &AI : F.args()) { |
236 | 42 | // Swift error really is a register that we model as memory -- instruction |
237 | 42 | // selection will perform mem-to-reg for us and spill/reload appropriately |
238 | 42 | // around calls that clobber it. There is no need to spill this |
239 | 42 | // value to the stack and doing so would not be allowed. |
240 | 42 | if (AI.isSwiftError()) |
241 | 2 | continue; |
242 | 40 | |
243 | 40 | Type *Ty = AI.getType(); |
244 | 40 | |
245 | 40 | // Use 'select i8 true, %arg, undef' to simulate a 'no-op' instruction. |
246 | 40 | Value *TrueValue = ConstantInt::getTrue(F.getContext()); |
247 | 40 | Value *UndefValue = UndefValue::get(Ty); |
248 | 40 | Instruction *SI = SelectInst::Create( |
249 | 40 | TrueValue, &AI, UndefValue, AI.getName() + ".tmp", &*AfterAllocaInsPt); |
250 | 40 | AI.replaceAllUsesWith(SI); |
251 | 40 | |
252 | 40 | // Reset the operand, because it was clobbered by the RAUW above. |
253 | 40 | SI->setOperand(1, &AI); |
254 | 40 | } |
255 | 39 | } |
256 | | |
257 | | /// lowerAcrossUnwindEdges - Find all variables which are alive across an unwind |
258 | | /// edge and spill them. |
259 | | void SjLjEHPrepare::lowerAcrossUnwindEdges(Function &F, |
260 | 39 | ArrayRef<InvokeInst *> Invokes) { |
261 | 39 | // Finally, scan the code looking for instructions with bad live ranges. |
262 | 380 | for (BasicBlock &BB : F) { |
263 | 5.39k | for (Instruction &Inst : BB) { |
264 | 5.39k | // Ignore obvious cases we don't have to handle. In particular, most |
265 | 5.39k | // instructions either have no uses or only have a single use inside the |
266 | 5.39k | // current block. Ignore them quickly. |
267 | 5.39k | if (Inst.use_empty()) |
268 | 2.20k | continue; |
269 | 3.19k | if (Inst.hasOneUse() && |
270 | 3.19k | cast<Instruction>(Inst.user_back())->getParent() == &BB2.17k && |
271 | 3.19k | !isa<PHINode>(Inst.user_back())2.15k ) |
272 | 2.15k | continue; |
273 | 1.03k | |
274 | 1.03k | // If this is an alloca in the entry block, it's not a real register |
275 | 1.03k | // value. |
276 | 1.03k | if (auto *AI = dyn_cast<AllocaInst>(&Inst)) |
277 | 733 | if (AI->isStaticAlloca()) |
278 | 733 | continue; |
279 | 306 | |
280 | 306 | // Avoid iterator invalidation by copying users to a temporary vector. |
281 | 306 | SmallVector<Instruction *, 16> Users; |
282 | 1.03k | for (User *U : Inst.users()) { |
283 | 1.03k | Instruction *UI = cast<Instruction>(U); |
284 | 1.03k | if (UI->getParent() != &BB || isa<PHINode>(UI)429 ) |
285 | 606 | Users.push_back(UI); |
286 | 1.03k | } |
287 | 306 | |
288 | 306 | // Find all of the blocks that this value is live in. |
289 | 306 | SmallPtrSet<BasicBlock *, 32> LiveBBs; |
290 | 306 | LiveBBs.insert(&BB); |
291 | 912 | while (!Users.empty()) { |
292 | 606 | Instruction *U = Users.pop_back_val(); |
293 | 606 | |
294 | 606 | if (!isa<PHINode>(U)) { |
295 | 585 | MarkBlocksLiveIn(U->getParent(), LiveBBs); |
296 | 585 | } else { |
297 | 21 | // Uses for a PHI node occur in their predecessor block. |
298 | 21 | PHINode *PN = cast<PHINode>(U); |
299 | 74 | for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i53 ) |
300 | 53 | if (PN->getIncomingValue(i) == &Inst) |
301 | 27 | MarkBlocksLiveIn(PN->getIncomingBlock(i), LiveBBs); |
302 | 21 | } |
303 | 606 | } |
304 | 306 | |
305 | 306 | // Now that we know all of the blocks that this thing is live in, see if |
306 | 306 | // it includes any of the unwind locations. |
307 | 306 | bool NeedsSpill = false; |
308 | 6.93k | for (InvokeInst *Invoke : Invokes) { |
309 | 6.93k | BasicBlock *UnwindBlock = Invoke->getUnwindDest(); |
310 | 6.93k | if (UnwindBlock != &BB && LiveBBs.count(UnwindBlock)6.82k ) { |
311 | 92 | LLVM_DEBUG(dbgs() << "SJLJ Spill: " << Inst << " around " |
312 | 92 | << UnwindBlock->getName() << "\n"); |
313 | 92 | NeedsSpill = true; |
314 | 92 | break; |
315 | 92 | } |
316 | 6.93k | } |
317 | 306 | |
318 | 306 | // If we decided we need a spill, do it. |
319 | 306 | // FIXME: Spilling this way is overkill, as it forces all uses of |
320 | 306 | // the value to be reloaded from the stack slot, even those that aren't |
321 | 306 | // in the unwind blocks. We should be more selective. |
322 | 306 | if (NeedsSpill) { |
323 | 92 | DemoteRegToStack(Inst, true); |
324 | 92 | ++NumSpilled; |
325 | 92 | } |
326 | 306 | } |
327 | 380 | } |
328 | 39 | |
329 | 39 | // Go through the landing pads and remove any PHIs there. |
330 | 178 | for (InvokeInst *Invoke : Invokes) { |
331 | 178 | BasicBlock *UnwindBlock = Invoke->getUnwindDest(); |
332 | 178 | LandingPadInst *LPI = UnwindBlock->getLandingPadInst(); |
333 | 178 | |
334 | 178 | // Place PHIs into a set to avoid invalidating the iterator. |
335 | 178 | SmallPtrSet<PHINode *, 8> PHIsToDemote; |
336 | 178 | for (BasicBlock::iterator PN = UnwindBlock->begin(); isa<PHINode>(PN); ++PN0 ) |
337 | 0 | PHIsToDemote.insert(cast<PHINode>(PN)); |
338 | 178 | if (PHIsToDemote.empty()) |
339 | 178 | continue; |
340 | 0 | |
341 | 0 | // Demote the PHIs to the stack. |
342 | 0 | for (PHINode *PN : PHIsToDemote) |
343 | 0 | DemotePHIToStack(PN); |
344 | 0 |
345 | 0 | // Move the landingpad instruction back to the top of the landing pad block. |
346 | 0 | LPI->moveBefore(&UnwindBlock->front()); |
347 | 0 | } |
348 | 39 | } |
349 | | |
350 | | /// setupEntryBlockAndCallSites - Setup the entry block by creating and filling |
351 | | /// the function context and marking the call sites with the appropriate |
352 | | /// values. These values are used by the DWARF EH emitter. |
353 | 8.31k | bool SjLjEHPrepare::setupEntryBlockAndCallSites(Function &F) { |
354 | 8.31k | SmallVector<ReturnInst *, 16> Returns; |
355 | 8.31k | SmallVector<InvokeInst *, 16> Invokes; |
356 | 8.31k | SmallSetVector<LandingPadInst *, 16> LPads; |
357 | 8.31k | |
358 | 8.31k | // Look through the terminators of the basic blocks to find invokes. |
359 | 8.31k | for (BasicBlock &BB : F) |
360 | 55.5k | if (auto *II = dyn_cast<InvokeInst>(BB.getTerminator())) { |
361 | 178 | if (Function *Callee = II->getCalledFunction()) |
362 | 175 | if (Callee->getIntrinsicID() == Intrinsic::donothing) { |
363 | 0 | // Remove the NOP invoke. |
364 | 0 | BranchInst::Create(II->getNormalDest(), II); |
365 | 0 | II->eraseFromParent(); |
366 | 0 | continue; |
367 | 0 | } |
368 | 178 | |
369 | 178 | Invokes.push_back(II); |
370 | 178 | LPads.insert(II->getUnwindDest()->getLandingPadInst()); |
371 | 55.3k | } else if (auto *RI = dyn_cast<ReturnInst>(BB.getTerminator())) { |
372 | 9.11k | Returns.push_back(RI); |
373 | 9.11k | } |
374 | 8.31k | |
375 | 8.31k | if (Invokes.empty()) |
376 | 8.27k | return false; |
377 | 39 | |
378 | 39 | NumInvokes += Invokes.size(); |
379 | 39 | |
380 | 39 | lowerIncomingArguments(F); |
381 | 39 | lowerAcrossUnwindEdges(F, Invokes); |
382 | 39 | |
383 | 39 | Value *FuncCtx = |
384 | 39 | setupFunctionContext(F, makeArrayRef(LPads.begin(), LPads.end())); |
385 | 39 | BasicBlock *EntryBB = &F.front(); |
386 | 39 | IRBuilder<> Builder(EntryBB->getTerminator()); |
387 | 39 | |
388 | 39 | // Get a reference to the jump buffer. |
389 | 39 | Value *JBufPtr = |
390 | 39 | Builder.CreateConstGEP2_32(FunctionContextTy, FuncCtx, 0, 5, "jbuf_gep"); |
391 | 39 | |
392 | 39 | // Save the frame pointer. |
393 | 39 | Value *FramePtr = Builder.CreateConstGEP2_32(doubleUnderJBufTy, JBufPtr, 0, 0, |
394 | 39 | "jbuf_fp_gep"); |
395 | 39 | |
396 | 39 | Value *Val = Builder.CreateCall(FrameAddrFn, Builder.getInt32(0), "fp"); |
397 | 39 | Builder.CreateStore(Val, FramePtr, /*isVolatile=*/true); |
398 | 39 | |
399 | 39 | // Save the stack pointer. |
400 | 39 | Value *StackPtr = Builder.CreateConstGEP2_32(doubleUnderJBufTy, JBufPtr, 0, 2, |
401 | 39 | "jbuf_sp_gep"); |
402 | 39 | |
403 | 39 | Val = Builder.CreateCall(StackAddrFn, {}, "sp"); |
404 | 39 | Builder.CreateStore(Val, StackPtr, /*isVolatile=*/true); |
405 | 39 | |
406 | 39 | // Call the setup_dispatch intrinsic. It fills in the rest of the jmpbuf.
407 | 39 | Builder.CreateCall(BuiltinSetupDispatchFn, {}); |
408 | 39 | |
409 | 39 | // Store a pointer to the function context so that the back-end will know |
410 | 39 | // where to look for it. |
411 | 39 | Value *FuncCtxArg = Builder.CreateBitCast(FuncCtx, Builder.getInt8PtrTy()); |
412 | 39 | Builder.CreateCall(FuncCtxFn, FuncCtxArg); |
413 | 39 | |
414 | 39 | // At this point, we are all set up, update the invoke instructions to mark |
415 | 39 | // their call_site values. |
416 | 217 | for (unsigned I = 0, E = Invokes.size(); I != E; ++I178 ) { |
417 | 178 | insertCallSiteStore(Invokes[I], I + 1); |
418 | 178 | |
419 | 178 | ConstantInt *CallSiteNum = |
420 | 178 | ConstantInt::get(Type::getInt32Ty(F.getContext()), I + 1); |
421 | 178 | |
422 | 178 | // Record the call site value for the back end so it stays associated with |
423 | 178 | // the invoke. |
424 | 178 | CallInst::Create(CallSiteFn, CallSiteNum, "", Invokes[I]); |
425 | 178 | } |
426 | 39 | |
427 | 39 | // Mark call instructions that aren't nounwind as no-action (call_site == |
428 | 39 | // -1). Skip the entry block, as prior to then, no function context has been |
429 | 39 | // created for this function and any unexpected exceptions thrown will go |
430 | 39 | // directly to the caller's context, which is what we want anyway, so no need |
431 | 39 | // to do anything here. |
432 | 380 | for (BasicBlock &BB : F) { |
433 | 380 | if (&BB == &F.front()) |
434 | 39 | continue; |
435 | 341 | for (Instruction &I : BB) |
436 | 5.49k | if (I.mayThrow()) |
437 | 786 | insertCallSiteStore(&I, -1); |
438 | 341 | } |
439 | 39 | |
440 | 39 | // Register the function context and make sure it's known to not throw |
441 | 39 | CallInst *Register = |
442 | 39 | CallInst::Create(RegisterFn, FuncCtx, "", EntryBB->getTerminator()); |
443 | 39 | Register->setDoesNotThrow(); |
444 | 39 | |
445 | 39 | // Following any allocas not in the entry block, update the saved SP in the |
446 | 39 | // jmpbuf to the new value. |
447 | 380 | for (BasicBlock &BB : F) { |
448 | 380 | if (&BB == &F.front()) |
449 | 39 | continue; |
450 | 7.06k | for (Instruction &I : BB)341 { |
451 | 7.06k | if (auto *CI = dyn_cast<CallInst>(&I)) { |
452 | 1.26k | if (CI->getCalledFunction() != StackRestoreFn) |
453 | 1.26k | continue; |
454 | 5.79k | } else if (!isa<AllocaInst>(&I)) { |
455 | 5.79k | continue; |
456 | 5.79k | } |
457 | 0 | Instruction *StackAddr = CallInst::Create(StackAddrFn, "sp"); |
458 | 0 | StackAddr->insertAfter(&I); |
459 | 0 | Instruction *StoreStackAddr = new StoreInst(StackAddr, StackPtr, true); |
460 | 0 | StoreStackAddr->insertAfter(StackAddr); |
461 | 0 | } |
462 | 341 | } |
463 | 39 | |
464 | 39 | // Finally, for any returns from this function, if this function contains an |
465 | 39 | // invoke, add a call to unregister the function context. |
466 | 39 | for (ReturnInst *Return : Returns) |
467 | 34 | CallInst::Create(UnregisterFn, FuncCtx, "", Return); |
468 | 39 | |
469 | 39 | return true; |
470 | 39 | } |
471 | | |
472 | 8.31k | bool SjLjEHPrepare::runOnFunction(Function &F) { |
473 | 8.31k | Module &M = *F.getParent(); |
474 | 8.31k | RegisterFn = M.getOrInsertFunction( |
475 | 8.31k | "_Unwind_SjLj_Register", Type::getVoidTy(M.getContext()), |
476 | 8.31k | PointerType::getUnqual(FunctionContextTy)); |
477 | 8.31k | UnregisterFn = M.getOrInsertFunction( |
478 | 8.31k | "_Unwind_SjLj_Unregister", Type::getVoidTy(M.getContext()), |
479 | 8.31k | PointerType::getUnqual(FunctionContextTy)); |
480 | 8.31k | FrameAddrFn = Intrinsic::getDeclaration( |
481 | 8.31k | &M, Intrinsic::frameaddress, |
482 | 8.31k | {Type::getInt8PtrTy(M.getContext(), |
483 | 8.31k | M.getDataLayout().getAllocaAddrSpace())}); |
484 | 8.31k | StackAddrFn = Intrinsic::getDeclaration(&M, Intrinsic::stacksave); |
485 | 8.31k | StackRestoreFn = Intrinsic::getDeclaration(&M, Intrinsic::stackrestore); |
486 | 8.31k | BuiltinSetupDispatchFn = |
487 | 8.31k | Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_setup_dispatch); |
488 | 8.31k | LSDAAddrFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_lsda); |
489 | 8.31k | CallSiteFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_callsite); |
490 | 8.31k | FuncCtxFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_functioncontext); |
491 | 8.31k | |
492 | 8.31k | bool Res = setupEntryBlockAndCallSites(F); |
493 | 8.31k | return Res; |
494 | 8.31k | } |