/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/Target/Mips/MCTargetDesc/MipsNaClELFStreamer.cpp
Line | Count | Source (jump to first uncovered line) |
//===-- MipsNaClELFStreamer.cpp - ELF Object Output for Mips NaCl ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements MCELFStreamer for Mips NaCl. It emits .o object files
// as required by NaCl's SFI sandbox. It inserts address-masking instructions
// before dangerous control-flow and memory access instructions. It inserts
// address-masking instructions after instructions that change the stack
// pointer. It ensures that the mask and the dangerous instruction are always
// emitted in the same bundle. It aligns call + branch delay to the bundle end,
// so that return address is always aligned to the start of next bundle.
//
//===----------------------------------------------------------------------===//
18 | | |
19 | | #include "Mips.h" |
20 | | #include "MipsELFStreamer.h" |
21 | | #include "MipsMCNaCl.h" |
22 | | #include "llvm/MC/MCAsmBackend.h" |
23 | | #include "llvm/MC/MCAssembler.h" |
24 | | #include "llvm/MC/MCCodeEmitter.h" |
25 | | #include "llvm/MC/MCELFStreamer.h" |
26 | | #include "llvm/MC/MCInst.h" |
27 | | #include "llvm/MC/MCObjectWriter.h" |
28 | | #include "llvm/Support/ErrorHandling.h" |
29 | | #include <cassert> |
30 | | |
31 | | using namespace llvm; |
32 | | |
33 | | #define DEBUG_TYPE "mips-mc-nacl" |
34 | | |
35 | | namespace { |
36 | | |
37 | | const unsigned IndirectBranchMaskReg = Mips::T6; |
38 | | const unsigned LoadStoreStackMaskReg = Mips::T7; |
39 | | |
40 | | /// Extend the generic MCELFStreamer class so that it can mask dangerous |
41 | | /// instructions. |
42 | | |
43 | | class MipsNaClELFStreamer : public MipsELFStreamer { |
44 | | public: |
45 | | MipsNaClELFStreamer(MCContext &Context, std::unique_ptr<MCAsmBackend> TAB, |
46 | | std::unique_ptr<MCObjectWriter> OW, |
47 | | std::unique_ptr<MCCodeEmitter> Emitter) |
48 | | : MipsELFStreamer(Context, std::move(TAB), std::move(OW), |
49 | 1 | std::move(Emitter)) {} |
50 | | |
51 | 1 | ~MipsNaClELFStreamer() override = default; |
52 | | |
53 | | private: |
54 | | // Whether we started the sandboxing sequence for calls. Calls are bundled |
55 | | // with branch delays and aligned to the bundle end. |
56 | | bool PendingCall = false; |
57 | | |
58 | 56 | bool isIndirectJump(const MCInst &MI) { |
59 | 56 | if (MI.getOpcode() == Mips::JALR) { |
60 | 2 | // MIPS32r6/MIPS64r6 doesn't have a JR instruction and uses JALR instead. |
61 | 2 | // JALR is an indirect branch if the link register is $0. |
62 | 2 | assert(MI.getOperand(0).isReg()); |
63 | 2 | return MI.getOperand(0).getReg() == Mips::ZERO; |
64 | 2 | } |
65 | 54 | return MI.getOpcode() == Mips::JR; |
66 | 54 | } |
67 | | |
68 | 54 | bool isStackPointerFirstOperand(const MCInst &MI) { |
69 | 54 | return (MI.getNumOperands() > 0 && MI.getOperand(0).isReg() |
70 | 54 | && MI.getOperand(0).getReg() == Mips::SP52 ); |
71 | 54 | } |
72 | | |
73 | 32 | bool isCall(const MCInst &MI, bool *IsIndirectCall) { |
74 | 32 | unsigned Opcode = MI.getOpcode(); |
75 | 32 | |
76 | 32 | *IsIndirectCall = false; |
77 | 32 | |
78 | 32 | switch (Opcode) { |
79 | 32 | default: |
80 | 24 | return false; |
81 | 32 | |
82 | 32 | case Mips::JAL: |
83 | 6 | case Mips::BAL: |
84 | 6 | case Mips::BAL_BR: |
85 | 6 | case Mips::BLTZAL: |
86 | 6 | case Mips::BGEZAL: |
87 | 6 | return true; |
88 | 6 | |
89 | 6 | case Mips::JALR: |
90 | 2 | // JALR is only a call if the link register is not $0. Otherwise it's an |
91 | 2 | // indirect branch. |
92 | 2 | assert(MI.getOperand(0).isReg()); |
93 | 2 | if (MI.getOperand(0).getReg() == Mips::ZERO) |
94 | 0 | return false; |
95 | 2 | |
96 | 2 | *IsIndirectCall = true; |
97 | 2 | return true; |
98 | 32 | } |
99 | 32 | } |
100 | | |
101 | | void emitMask(unsigned AddrReg, unsigned MaskReg, |
102 | 27 | const MCSubtargetInfo &STI) { |
103 | 27 | MCInst MaskInst; |
104 | 27 | MaskInst.setOpcode(Mips::AND); |
105 | 27 | MaskInst.addOperand(MCOperand::createReg(AddrReg)); |
106 | 27 | MaskInst.addOperand(MCOperand::createReg(AddrReg)); |
107 | 27 | MaskInst.addOperand(MCOperand::createReg(MaskReg)); |
108 | 27 | MipsELFStreamer::EmitInstruction(MaskInst, STI); |
109 | 27 | } |
110 | | |
111 | | // Sandbox indirect branch or return instruction by inserting mask operation |
112 | | // before it. |
113 | 2 | void sandboxIndirectJump(const MCInst &MI, const MCSubtargetInfo &STI) { |
114 | 2 | unsigned AddrReg = MI.getOperand(0).getReg(); |
115 | 2 | |
116 | 2 | EmitBundleLock(false); |
117 | 2 | emitMask(AddrReg, IndirectBranchMaskReg, STI); |
118 | 2 | MipsELFStreamer::EmitInstruction(MI, STI); |
119 | 2 | EmitBundleUnlock(); |
120 | 2 | } |
121 | | |
122 | | // Sandbox memory access or SP change. Insert mask operation before and/or |
123 | | // after the instruction. |
124 | | void sandboxLoadStoreStackChange(const MCInst &MI, unsigned AddrIdx, |
125 | | const MCSubtargetInfo &STI, bool MaskBefore, |
126 | 22 | bool MaskAfter) { |
127 | 22 | EmitBundleLock(false); |
128 | 22 | if (MaskBefore) { |
129 | 19 | // Sandbox memory access. |
130 | 19 | unsigned BaseReg = MI.getOperand(AddrIdx).getReg(); |
131 | 19 | emitMask(BaseReg, LoadStoreStackMaskReg, STI); |
132 | 19 | } |
133 | 22 | MipsELFStreamer::EmitInstruction(MI, STI); |
134 | 22 | if (MaskAfter) { |
135 | 4 | // Sandbox SP change. |
136 | 4 | unsigned SPReg = MI.getOperand(0).getReg(); |
137 | 4 | assert((Mips::SP == SPReg) && "Unexpected stack-pointer register."); |
138 | 4 | emitMask(SPReg, LoadStoreStackMaskReg, STI); |
139 | 4 | } |
140 | 22 | EmitBundleUnlock(); |
141 | 22 | } |
142 | | |
143 | | public: |
144 | | /// This function is the one used to emit instruction data into the ELF |
145 | | /// streamer. We override it to mask dangerous instructions. |
146 | | void EmitInstruction(const MCInst &Inst, |
147 | 56 | const MCSubtargetInfo &STI) override { |
148 | 56 | // Sandbox indirect jumps. |
149 | 56 | if (isIndirectJump(Inst)) { |
150 | 2 | if (PendingCall) |
151 | 0 | report_fatal_error("Dangerous instruction in branch delay slot!"); |
152 | 2 | sandboxIndirectJump(Inst, STI); |
153 | 2 | return; |
154 | 2 | } |
155 | 54 | |
156 | 54 | // Sandbox loads, stores and SP changes. |
157 | 54 | unsigned AddrIdx; |
158 | 54 | bool IsStore; |
159 | 54 | bool IsMemAccess = isBasePlusOffsetMemoryAccess(Inst.getOpcode(), &AddrIdx, |
160 | 54 | &IsStore); |
161 | 54 | bool IsSPFirstOperand = isStackPointerFirstOperand(Inst); |
162 | 54 | if (IsMemAccess || IsSPFirstOperand26 ) { |
163 | 30 | bool MaskBefore = (IsMemAccess |
164 | 30 | && baseRegNeedsLoadStoreMask(Inst.getOperand(AddrIdx) |
165 | 28 | .getReg())); |
166 | 30 | bool MaskAfter = IsSPFirstOperand && !IsStore6 ; |
167 | 30 | if (MaskBefore || MaskAfter11 ) { |
168 | 22 | if (PendingCall) |
169 | 0 | report_fatal_error("Dangerous instruction in branch delay slot!"); |
170 | 22 | sandboxLoadStoreStackChange(Inst, AddrIdx, STI, MaskBefore, MaskAfter); |
171 | 22 | return; |
172 | 22 | } |
173 | 30 | // fallthrough |
174 | 30 | } |
175 | 32 | |
176 | 32 | // Sandbox calls by aligning call and branch delay to the bundle end. |
177 | 32 | // For indirect calls, emit the mask before the call. |
178 | 32 | bool IsIndirectCall; |
179 | 32 | if (isCall(Inst, &IsIndirectCall)) { |
180 | 8 | if (PendingCall) |
181 | 0 | report_fatal_error("Dangerous instruction in branch delay slot!"); |
182 | 8 | |
183 | 8 | // Start the sandboxing sequence by emitting call. |
184 | 8 | EmitBundleLock(true); |
185 | 8 | if (IsIndirectCall) { |
186 | 2 | unsigned TargetReg = Inst.getOperand(1).getReg(); |
187 | 2 | emitMask(TargetReg, IndirectBranchMaskReg, STI); |
188 | 2 | } |
189 | 8 | MipsELFStreamer::EmitInstruction(Inst, STI); |
190 | 8 | PendingCall = true; |
191 | 8 | return; |
192 | 8 | } |
193 | 24 | if (PendingCall) { |
194 | 8 | // Finish the sandboxing sequence by emitting branch delay. |
195 | 8 | MipsELFStreamer::EmitInstruction(Inst, STI); |
196 | 8 | EmitBundleUnlock(); |
197 | 8 | PendingCall = false; |
198 | 8 | return; |
199 | 8 | } |
200 | 16 | |
201 | 16 | // None of the sandboxing applies, just emit the instruction. |
202 | 16 | MipsELFStreamer::EmitInstruction(Inst, STI); |
203 | 16 | } |
204 | | }; |
205 | | |
206 | | } // end anonymous namespace |
207 | | |
208 | | namespace llvm { |
209 | | |
210 | | bool isBasePlusOffsetMemoryAccess(unsigned Opcode, unsigned *AddrIdx, |
211 | 84 | bool *IsStore) { |
212 | 84 | if (IsStore) |
213 | 54 | *IsStore = false; |
214 | 84 | |
215 | 84 | switch (Opcode) { |
216 | 84 | default: |
217 | 48 | return false; |
218 | 84 | |
219 | 84 | // Load instructions with base address register in position 1. |
220 | 84 | case Mips::LB: |
221 | 16 | case Mips::LBu: |
222 | 16 | case Mips::LH: |
223 | 16 | case Mips::LHu: |
224 | 16 | case Mips::LW: |
225 | 16 | case Mips::LWC1: |
226 | 16 | case Mips::LDC1: |
227 | 16 | case Mips::LL: |
228 | 16 | case Mips::LL_R6: |
229 | 16 | case Mips::LWL: |
230 | 16 | case Mips::LWR: |
231 | 16 | *AddrIdx = 1; |
232 | 16 | return true; |
233 | 16 | |
234 | 16 | // Store instructions with base address register in position 1. |
235 | 19 | case Mips::SB: |
236 | 19 | case Mips::SH: |
237 | 19 | case Mips::SW: |
238 | 19 | case Mips::SWC1: |
239 | 19 | case Mips::SDC1: |
240 | 19 | case Mips::SWL: |
241 | 19 | case Mips::SWR: |
242 | 19 | *AddrIdx = 1; |
243 | 19 | if (IsStore) |
244 | 12 | *IsStore = true; |
245 | 19 | return true; |
246 | 19 | |
247 | 19 | // Store instructions with base address register in position 2. |
248 | 19 | case Mips::SC: |
249 | 1 | case Mips::SC_R6: |
250 | 1 | *AddrIdx = 2; |
251 | 1 | if (IsStore) |
252 | 1 | *IsStore = true; |
253 | 1 | return true; |
254 | 84 | } |
255 | 84 | } |
256 | | |
257 | 36 | bool baseRegNeedsLoadStoreMask(unsigned Reg) { |
258 | 36 | // The contents of SP and thread pointer register do not require masking. |
259 | 36 | return Reg != Mips::SP && Reg != Mips::T830 ; |
260 | 36 | } |
261 | | |
262 | | MCELFStreamer *createMipsNaClELFStreamer(MCContext &Context, |
263 | | std::unique_ptr<MCAsmBackend> TAB, |
264 | | std::unique_ptr<MCObjectWriter> OW, |
265 | | std::unique_ptr<MCCodeEmitter> Emitter, |
266 | 1 | bool RelaxAll) { |
267 | 1 | MipsNaClELFStreamer *S = new MipsNaClELFStreamer( |
268 | 1 | Context, std::move(TAB), std::move(OW), std::move(Emitter)); |
269 | 1 | if (RelaxAll) |
270 | 0 | S->getAssembler().setRelaxAll(true); |
271 | 1 | |
272 | 1 | // Set bundle-alignment as required by the NaCl ABI for the target. |
273 | 1 | S->EmitBundleAlignMode(MIPS_NACL_BUNDLE_ALIGN); |
274 | 1 | |
275 | 1 | return S; |
276 | 1 | } |
277 | | |
278 | | } // end namespace llvm |