llvm.org GIT mirror llvm / b714d34

Move SafeStack to CodeGen. It depends on the target machinery, which isn't available to instrumentation passes.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@258942 91177308-0d34-0410-b5e6-96231b3b80d8

Benjamin Kramer, 4 years ago

9 changed files with 783 additions and 782 deletions.
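For context: the pass only rewrites functions that carry the safestack attribute, which clang attaches under -fsanitize=safe-stack (see the docs URL in the file header below). A small illustrative example of the split, assuming a clang with SafeStack support; the exact placement decisions are up to the pass's analysis:

    // example.cpp -- build with: clang++ -fsanitize=safe-stack example.cpp
    #include <cstdio>

    int main() {
      char buf[16];
      // buf's address escapes into snprintf, so the pass cannot prove every
      // access in-bounds; buf is expected to move to the unsafe stack, while
      // the return address and other spills stay on the regular (safe) stack.
      std::snprintf(buf, sizeof(buf), "%d", 42);
      std::printf("%s\n", buf);
      return 0;
    }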
include/llvm/CodeGen/Passes.h:

 Pass *createGlobalMergePass(const TargetMachine *TM, unsigned MaximalOffset,
                             bool OnlyOptimizeForSize = false,
                             bool MergeExternalByDefault = false);
+
+/// This pass splits the stack into a safe stack and an unsafe stack to
+/// protect against stack-based overflow vulnerabilities.
+FunctionPass *createSafeStackPass(const TargetMachine *TM = nullptr);
 } // End llvm namespace

 /// Target machine pass initializer for passes with dependencies. Use with
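With the declaration now in CodeGen, the pass is constructed like other TargetMachine-aware CodeGen passes. A minimal sketch of driving it from the legacy pass manager (illustrative only, not part of this commit; the helper name runSafeStack is made up, and the APIs are the LLVM 3.8-era ones):

    #include "llvm/CodeGen/Passes.h"
    #include "llvm/IR/LegacyPassManager.h"
    #include "llvm/IR/Module.h"
    #include "llvm/Support/TargetRegistry.h"
    #include "llvm/Support/TargetSelect.h"
    #include "llvm/Target/TargetMachine.h"
    #include <memory>

    using namespace llvm;

    // Illustrative: run SafeStack over a module whose target triple is set.
    static bool runSafeStack(Module &M) {
      InitializeAllTargetInfos();
      InitializeAllTargets();
      InitializeAllTargetMCs();

      std::string Err;
      const Target *T = TargetRegistry::lookupTarget(M.getTargetTriple(), Err);
      if (!T)
        return false;

      std::unique_ptr<TargetMachine> TM(T->createTargetMachine(
          M.getTargetTriple(), /*CPU=*/"", /*Features=*/"", TargetOptions()));

      legacy::PassManager PM;
      // Passing the TargetMachine lets the pass ask TargetLoweringBase for a
      // target-specific unsafe stack pointer location; nullptr is also legal
      // and falls back to the generic TLS variable (see SafeStack.cpp below).
      PM.add(createSafeStackPass(TM.get()));
      return PM.run(M);
    }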
include/llvm/Transforms/Instrumentation.h:

 // checking on loads, stores, and other memory intrinsics.
 FunctionPass *createBoundsCheckingPass();

-/// \brief This pass splits the stack into a safe stack and an unsafe stack to
-/// protect against stack-based overflow vulnerabilities.
-FunctionPass *createSafeStackPass(const TargetMachine *TM = nullptr);
-
 /// \brief Calculate what to divide by to scale counts.
 ///
 /// Given the maximum count, calculate a divisor that will scale all the
lib/CodeGen/CMakeLists.txt:

   CodeGen.cpp
   CodeGenPrepare.cpp
   CriticalAntiDepBreaker.cpp
+  DeadMachineInstructionElim.cpp
   DFAPacketizer.cpp
-  DeadMachineInstructionElim.cpp
   DwarfEHPrepare.cpp
   EarlyIfConversion.cpp
   EdgeBundles.cpp
   ExecutionDepsFix.cpp
   ExpandISelPseudos.cpp
   ExpandPostRAPseudos.cpp
-  LiveDebugValues.cpp
   FaultMaps.cpp
   FuncletLayout.cpp
   GCMetadata.cpp
 ...
   InterferenceCache.cpp
   InterleavedAccessPass.cpp
   IntrinsicLowering.cpp
-  LLVMTargetMachine.cpp
   LatencyPriorityQueue.cpp
   LexicalScopes.cpp
+  LiveDebugValues.cpp
   LiveDebugVariables.cpp
+  LiveIntervalAnalysis.cpp
   LiveInterval.cpp
-  LiveIntervalAnalysis.cpp
   LiveIntervalUnion.cpp
+  LivePhysRegs.cpp
   LiveRangeCalc.cpp
   LiveRangeEdit.cpp
   LiveRegMatrix.cpp
-  LivePhysRegs.cpp
   LiveStackAnalysis.cpp
   LiveVariables.cpp
+  LLVMTargetMachine.cpp
   LocalStackSlotAllocation.cpp
   LowerEmuTLS.cpp
   MachineBasicBlock.cpp
   MachineBlockFrequencyInfo.cpp
   MachineBlockPlacement.cpp
   MachineBranchProbabilityInfo.cpp
-  MachineCSE.cpp
   MachineCombiner.cpp
   MachineCopyPropagation.cpp
+  MachineCSE.cpp
+  MachineDominanceFrontier.cpp
   MachineDominators.cpp
-  MachineDominanceFrontier.cpp
+  MachineFunctionAnalysis.cpp
   MachineFunction.cpp
-  MachineFunctionAnalysis.cpp
   MachineFunctionPass.cpp
   MachineFunctionPrinterPass.cpp
+  MachineInstrBundle.cpp
   MachineInstr.cpp
-  MachineInstrBundle.cpp
   MachineLICM.cpp
   MachineLoopInfo.cpp
   MachineModuleInfo.cpp
   MachineModuleInfoImpls.cpp
   MachinePassRegistry.cpp
   MachinePostDominators.cpp
+  MachineRegionInfo.cpp
   MachineRegisterInfo.cpp
-  MachineRegionInfo.cpp
-  MachineSSAUpdater.cpp
   MachineScheduler.cpp
   MachineSink.cpp
+  MachineSSAUpdater.cpp
   MachineTraceMetrics.cpp
   MachineVerifier.cpp
   MIRPrinter.cpp
   MIRPrintingPass.cpp
   OptimizePHIs.cpp
-  PHIElimination.cpp
-  PHIEliminationUtils.cpp
   ParallelCG.cpp
   Passes.cpp
   PeepholeOptimizer.cpp
+  PHIElimination.cpp
+  PHIEliminationUtils.cpp
   PostRASchedulerList.cpp
   ProcessImplicitDefs.cpp
   PrologEpilogInserter.cpp
 ...
   RegisterCoalescer.cpp
   RegisterPressure.cpp
   RegisterScavenging.cpp
+  SafeStack.cpp
   ScheduleDAG.cpp
   ScheduleDAGInstrs.cpp
   ScheduleDAGPrinter.cpp
   ScoreboardHazardRecognizer.cpp
+  ShadowStackGCLowering.cpp
   ShrinkWrap.cpp
-  ShadowStackGCLowering.cpp
   SjLjEHPrepare.cpp
   SlotIndexes.cpp
   SpillPlacement.cpp
   SplitKit.cpp
   StackColoring.cpp
+  StackMapLivenessAnalysis.cpp
+  StackMaps.cpp
   StackProtector.cpp
   StackSlotColoring.cpp
-  StackMapLivenessAnalysis.cpp
-  StackMaps.cpp
   TailDuplication.cpp
   TargetFrameLoweringImpl.cpp
   TargetInstrInfo.cpp
lib/CodeGen/CodeGen.cpp:

   initializeStackColoringPass(Registry);
   initializeStackMapLivenessPass(Registry);
   initializeLiveDebugValuesPass(Registry);
+  initializeSafeStackPass(Registry);
   initializeStackProtectorPass(Registry);
   initializeStackSlotColoringPass(Registry);
   initializeTailDuplicatePassPass(Registry);
lib/CodeGen/SafeStack.cpp (new file):

//===-- SafeStack.cpp - Safe Stack Insertion ------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass splits the stack into the safe stack (kept as-is for LLVM backend)
// and the unsafe stack (explicitly allocated and managed through the runtime
// support library).
//
// http://clang.llvm.org/docs/SafeStack.html
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_os_ostream.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"

using namespace llvm;

#define DEBUG_TYPE "safestack"

enum UnsafeStackPtrStorageVal { ThreadLocalUSP, SingleThreadUSP };

static cl::opt<UnsafeStackPtrStorageVal> USPStorage("safe-stack-usp-storage",
    cl::Hidden, cl::init(ThreadLocalUSP),
    cl::desc("Type of storage for the unsafe stack pointer"),
    cl::values(clEnumValN(ThreadLocalUSP, "thread-local",
                          "Thread-local storage"),
               clEnumValN(SingleThreadUSP, "single-thread",
                          "Non-thread-local storage"),
               clEnumValEnd));

namespace llvm {

STATISTIC(NumFunctions, "Total number of functions");
STATISTIC(NumUnsafeStackFunctions, "Number of functions with unsafe stack");
STATISTIC(NumUnsafeStackRestorePointsFunctions,
          "Number of functions that use setjmp or exceptions");

STATISTIC(NumAllocas, "Total number of allocas");
STATISTIC(NumUnsafeStaticAllocas, "Number of unsafe static allocas");
STATISTIC(NumUnsafeDynamicAllocas, "Number of unsafe dynamic allocas");
STATISTIC(NumUnsafeByValArguments, "Number of unsafe byval arguments");
STATISTIC(NumUnsafeStackRestorePoints, "Number of setjmps and landingpads");

} // namespace llvm

namespace {

/// Rewrite an SCEV expression for a memory access address to an expression
/// that represents offset from the given alloca.
///
/// The implementation simply replaces all mentions of the alloca with zero.
class AllocaOffsetRewriter : public SCEVRewriteVisitor<AllocaOffsetRewriter> {
  const Value *AllocaPtr;

public:
  AllocaOffsetRewriter(ScalarEvolution &SE, const Value *AllocaPtr)
      : SCEVRewriteVisitor(SE), AllocaPtr(AllocaPtr) {}

  const SCEV *visitUnknown(const SCEVUnknown *Expr) {
    if (Expr->getValue() == AllocaPtr)
      return SE.getZero(Expr->getType());
    return Expr;
  }
};

/// The SafeStack pass splits the stack of each function into the safe
/// stack, which is only accessed through memory safe dereferences (as
/// determined statically), and the unsafe stack, which contains all
/// local variables that are accessed in ways that we can't prove to
/// be safe.
class SafeStack : public FunctionPass {
  const TargetMachine *TM;
  const TargetLoweringBase *TL;
  const DataLayout *DL;
  ScalarEvolution *SE;

  Type *StackPtrTy;
  Type *IntPtrTy;
  Type *Int32Ty;
  Type *Int8Ty;

  Value *UnsafeStackPtr = nullptr;

  /// Unsafe stack alignment. Each stack frame must ensure that the stack is
  /// aligned to this value. We need to re-align the unsafe stack if the
  /// alignment of any object on the stack exceeds this value.
  ///
  /// 16 seems like a reasonable upper bound on the alignment of objects that
  /// we might expect to appear on the stack on most common targets.
  enum { StackAlignment = 16 };

  /// \brief Build a value representing a pointer to the unsafe stack pointer.
  Value *getOrCreateUnsafeStackPtr(IRBuilder<> &IRB, Function &F);

  /// \brief Find all static allocas, dynamic allocas, return instructions and
  /// stack restore points (exception unwind blocks and setjmp calls) in the
  /// given function and append them to the respective vectors.
  void findInsts(Function &F, SmallVectorImpl<AllocaInst *> &StaticAllocas,
                 SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                 SmallVectorImpl<Argument *> &ByValArguments,
                 SmallVectorImpl<ReturnInst *> &Returns,
                 SmallVectorImpl<Instruction *> &StackRestorePoints);

  /// \brief Calculate the allocation size of a given alloca. Returns 0 if the
  /// size can not be statically determined.
  uint64_t getStaticAllocaAllocationSize(const AllocaInst* AI);

  /// \brief Allocate space for all static allocas in \p StaticAllocas,
  /// replace allocas with pointers into the unsafe stack and generate code to
  /// restore the stack pointer before all return instructions in \p Returns.
  ///
  /// \returns A pointer to the top of the unsafe stack after all unsafe static
  /// allocas are allocated.
  Value *moveStaticAllocasToUnsafeStack(IRBuilder<> &IRB, Function &F,
                                        ArrayRef<AllocaInst *> StaticAllocas,
                                        ArrayRef<Argument *> ByValArguments,
                                        ArrayRef<ReturnInst *> Returns);

  /// \brief Generate code to restore the stack after all stack restore points
  /// in \p StackRestorePoints.
  ///
  /// \returns A local variable in which to maintain the dynamic top of the
  /// unsafe stack if needed.
  AllocaInst *
  createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                           ArrayRef<Instruction *> StackRestorePoints,
                           Value *StaticTop, bool NeedDynamicTop);

  /// \brief Replace all allocas in \p DynamicAllocas with code to allocate
  /// space dynamically on the unsafe stack and store the dynamic unsafe stack
  /// top to \p DynamicTop if non-null.
  void moveDynamicAllocasToUnsafeStack(Function &F, Value *UnsafeStackPtr,
                                       AllocaInst *DynamicTop,
                                       ArrayRef<AllocaInst *> DynamicAllocas);

  bool IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize);

  bool IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                          const Value *AllocaPtr, uint64_t AllocaSize);
  bool IsAccessSafe(Value *Addr, uint64_t Size, const Value *AllocaPtr,
                    uint64_t AllocaSize);

public:
  static char ID; // Pass identification, replacement for typeid.
  SafeStack(const TargetMachine *TM)
      : FunctionPass(ID), TM(TM), TL(nullptr), DL(nullptr) {
    initializeSafeStackPass(*PassRegistry::getPassRegistry());
  }
  SafeStack() : SafeStack(nullptr) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<ScalarEvolutionWrapperPass>();
  }

  bool doInitialization(Module &M) override {
    DL = &M.getDataLayout();

    StackPtrTy = Type::getInt8PtrTy(M.getContext());
    IntPtrTy = DL->getIntPtrType(M.getContext());
    Int32Ty = Type::getInt32Ty(M.getContext());
    Int8Ty = Type::getInt8Ty(M.getContext());

    return false;
  }

  bool runOnFunction(Function &F) override;
}; // class SafeStack

uint64_t SafeStack::getStaticAllocaAllocationSize(const AllocaInst* AI) {
  uint64_t Size = DL->getTypeAllocSize(AI->getAllocatedType());
  if (AI->isArrayAllocation()) {
    auto C = dyn_cast<ConstantInt>(AI->getArraySize());
    if (!C)
      return 0;
    Size *= C->getZExtValue();
  }
  return Size;
}

bool SafeStack::IsAccessSafe(Value *Addr, uint64_t AccessSize,
                             const Value *AllocaPtr, uint64_t AllocaSize) {
  AllocaOffsetRewriter Rewriter(*SE, AllocaPtr);
  const SCEV *Expr = Rewriter.visit(SE->getSCEV(Addr));

  uint64_t BitWidth = SE->getTypeSizeInBits(Expr->getType());
  ConstantRange AccessStartRange = SE->getUnsignedRange(Expr);
  ConstantRange SizeRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AccessSize));
  ConstantRange AccessRange = AccessStartRange.add(SizeRange);
  ConstantRange AllocaRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AllocaSize));
  bool Safe = AllocaRange.contains(AccessRange);

  DEBUG(dbgs() << "[SafeStack] "
               << (isa<AllocaInst>(AllocaPtr) ? "Alloca " : "ByValArgument ")
               << *AllocaPtr << "\n"
               << "            Access " << *Addr << "\n"
               << "            SCEV " << *Expr
               << " U: " << SE->getUnsignedRange(Expr)
               << ", S: " << SE->getSignedRange(Expr) << "\n"
               << "            Range " << AccessRange << "\n"
               << "            AllocaRange " << AllocaRange << "\n"
               << "            " << (Safe ? "safe" : "unsafe") << "\n");

  return Safe;
}

bool SafeStack::IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                                   const Value *AllocaPtr,
                                   uint64_t AllocaSize) {
  // All MemIntrinsics have destination address in Arg0 and size in Arg2.
  if (MI->getRawDest() != U) return true;
  const auto *Len = dyn_cast<ConstantInt>(MI->getLength());
  // Non-constant size => unsafe. FIXME: try SCEV getRange.
  if (!Len) return false;
  return IsAccessSafe(U, Len->getZExtValue(), AllocaPtr, AllocaSize);
}

/// Check whether a given allocation must be put on the safe
/// stack or not. The function analyzes all uses of AI and checks whether it is
/// only accessed in a memory safe way (as decided statically).
bool SafeStack::IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize) {
  // Go through all uses of this alloca and check whether all accesses to the
  // allocated object are statically known to be memory safe and, hence, the
  // object can be placed on the safe stack.
  SmallPtrSet<const Value *, 16> Visited;
  SmallVector<const Value *, 8> WorkList;
  WorkList.push_back(AllocaPtr);

  // A DFS search through all uses of the alloca in bitcasts/PHI/GEPs/etc.
  while (!WorkList.empty()) {
    const Value *V = WorkList.pop_back_val();
    for (const Use &UI : V->uses()) {
      auto I = cast<const Instruction>(UI.getUser());
      assert(V == UI.get());

      switch (I->getOpcode()) {
      case Instruction::Load: {
        if (!IsAccessSafe(UI, DL->getTypeStoreSize(I->getType()), AllocaPtr,
                          AllocaSize))
          return false;
        break;
      }
      case Instruction::VAArg:
        // "va-arg" from a pointer is safe.
        break;
      case Instruction::Store: {
        if (V == I->getOperand(0)) {
          // Stored the pointer - conservatively assume it may be unsafe.
          DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                       << "\n            store of address: " << *I << "\n");
          return false;
        }

        if (!IsAccessSafe(UI, DL->getTypeStoreSize(I->getOperand(0)->getType()),
                          AllocaPtr, AllocaSize))
          return false;
        break;
      }
      case Instruction::Ret: {
        // Information leak.
        return false;
      }

      case Instruction::Call:
      case Instruction::Invoke: {
        ImmutableCallSite CS(I);

        if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
          if (II->getIntrinsicID() == Intrinsic::lifetime_start ||
              II->getIntrinsicID() == Intrinsic::lifetime_end)
            continue;
        }

        if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
          if (!IsMemIntrinsicSafe(MI, UI, AllocaPtr, AllocaSize)) {
            DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                         << "\n            unsafe memintrinsic: " << *I
                         << "\n");
            return false;
          }
          continue;
        }

        // LLVM 'nocapture' attribute is only set for arguments whose address
        // is not stored, passed around, or used in any other non-trivial way.
        // We assume that passing a pointer to an object as a 'nocapture
        // readnone' argument is safe.
        // FIXME: a more precise solution would require an interprocedural
        // analysis here, which would look at all uses of an argument inside
        // the function being called.
        ImmutableCallSite::arg_iterator B = CS.arg_begin(), E = CS.arg_end();
        for (ImmutableCallSite::arg_iterator A = B; A != E; ++A)
          if (A->get() == V)
            if (!(CS.doesNotCapture(A - B) && (CS.doesNotAccessMemory(A - B) ||
                                               CS.doesNotAccessMemory()))) {
              DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                           << "\n            unsafe call: " << *I << "\n");
              return false;
            }
        continue;
      }

      default:
        if (Visited.insert(I).second)
          WorkList.push_back(cast<const Instruction>(I));
      }
    }
  }

  // All uses of the alloca are safe, we can place it on the safe stack.
  return true;
}

Value *SafeStack::getOrCreateUnsafeStackPtr(IRBuilder<> &IRB, Function &F) {
  // Check if there is a target-specific location for the unsafe stack pointer.
  if (TL)
    if (Value *V = TL->getSafeStackPointerLocation(IRB))
      return V;

  // Otherwise, assume the target links with compiler-rt, which provides a
  // thread-local variable with a magic name.
  Module &M = *F.getParent();
  const char *UnsafeStackPtrVar = "__safestack_unsafe_stack_ptr";
  auto UnsafeStackPtr =
      dyn_cast_or_null<GlobalVariable>(M.getNamedValue(UnsafeStackPtrVar));

  bool UseTLS = USPStorage == ThreadLocalUSP;

  if (!UnsafeStackPtr) {
    auto TLSModel = UseTLS ?
        GlobalValue::InitialExecTLSModel :
        GlobalValue::NotThreadLocal;
    // The global variable is not defined yet, define it ourselves.
    // We use the initial-exec TLS model because we do not support the
    // variable living anywhere other than in the main executable.
    UnsafeStackPtr = new GlobalVariable(
        M, StackPtrTy, false, GlobalValue::ExternalLinkage, nullptr,
        UnsafeStackPtrVar, nullptr, TLSModel);
  } else {
    // The variable exists, check its type and attributes.
    if (UnsafeStackPtr->getValueType() != StackPtrTy)
      report_fatal_error(Twine(UnsafeStackPtrVar) + " must have void* type");
    if (UseTLS != UnsafeStackPtr->isThreadLocal())
      report_fatal_error(Twine(UnsafeStackPtrVar) + " must " +
                         (UseTLS ? "" : "not ") + "be thread-local");
  }
  return UnsafeStackPtr;
}

void SafeStack::findInsts(Function &F,
                          SmallVectorImpl<AllocaInst *> &StaticAllocas,
                          SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                          SmallVectorImpl<Argument *> &ByValArguments,
                          SmallVectorImpl<ReturnInst *> &Returns,
                          SmallVectorImpl<Instruction *> &StackRestorePoints) {
  for (Instruction &I : instructions(&F)) {
    if (auto AI = dyn_cast<AllocaInst>(&I)) {
      ++NumAllocas;

      uint64_t Size = getStaticAllocaAllocationSize(AI);
      if (IsSafeStackAlloca(AI, Size))
        continue;

      if (AI->isStaticAlloca()) {
        ++NumUnsafeStaticAllocas;
        StaticAllocas.push_back(AI);
      } else {
        ++NumUnsafeDynamicAllocas;
        DynamicAllocas.push_back(AI);
      }
    } else if (auto RI = dyn_cast<ReturnInst>(&I)) {
      Returns.push_back(RI);
    } else if (auto CI = dyn_cast<CallInst>(&I)) {
      // setjmps require stack restore.
      if (CI->getCalledFunction() && CI->canReturnTwice())
        StackRestorePoints.push_back(CI);
    } else if (auto LP = dyn_cast<LandingPadInst>(&I)) {
      // Exception landing pads require stack restore.
      StackRestorePoints.push_back(LP);
    } else if (auto II = dyn_cast<IntrinsicInst>(&I)) {
      if (II->getIntrinsicID() == Intrinsic::gcroot)
        llvm::report_fatal_error(
            "gcroot intrinsic not compatible with safestack attribute");
    }
  }
  for (Argument &Arg : F.args()) {
    if (!Arg.hasByValAttr())
      continue;
    uint64_t Size =
        DL->getTypeStoreSize(Arg.getType()->getPointerElementType());
    if (IsSafeStackAlloca(&Arg, Size))
      continue;

    ++NumUnsafeByValArguments;
    ByValArguments.push_back(&Arg);
  }
}

AllocaInst *
SafeStack::createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                                    ArrayRef<Instruction *> StackRestorePoints,
                                    Value *StaticTop, bool NeedDynamicTop) {
  if (StackRestorePoints.empty())
    return nullptr;

  // We need the current value of the shadow stack pointer to restore
  // after longjmp or exception catching.

  // FIXME: On some platforms this could be handled by the longjmp/exception
  // runtime itself.

  AllocaInst *DynamicTop = nullptr;
  if (NeedDynamicTop)
    // If we also have dynamic allocas, the stack pointer value changes
    // throughout the function. For now we store it in an alloca.
    DynamicTop = IRB.CreateAlloca(StackPtrTy, /*ArraySize=*/nullptr,
                                  "unsafe_stack_dynamic_ptr");

  if (!StaticTop)
    // We need the original unsafe stack pointer value, even if there are
    // no unsafe static allocas.
    StaticTop = IRB.CreateLoad(UnsafeStackPtr, false, "unsafe_stack_ptr");

  if (NeedDynamicTop)
    IRB.CreateStore(StaticTop, DynamicTop);

  // Restore current stack pointer after longjmp/exception catch.
  for (Instruction *I : StackRestorePoints) {
    ++NumUnsafeStackRestorePoints;

    IRB.SetInsertPoint(I->getNextNode());
    Value *CurrentTop = DynamicTop ? IRB.CreateLoad(DynamicTop) : StaticTop;
    IRB.CreateStore(CurrentTop, UnsafeStackPtr);
  }

  return DynamicTop;
}

Value *SafeStack::moveStaticAllocasToUnsafeStack(
    IRBuilder<> &IRB, Function &F, ArrayRef<AllocaInst *> StaticAllocas,
    ArrayRef<Argument *> ByValArguments, ArrayRef<ReturnInst *> Returns) {
  if (StaticAllocas.empty() && ByValArguments.empty())
    return nullptr;

  DIBuilder DIB(*F.getParent());

  // We explicitly compute and set the unsafe stack layout for all unsafe
  // static alloca instructions. We save the unsafe "base pointer" in the
  // prologue into a local variable and restore it in the epilogue.

  // Load the current stack pointer (we'll also use it as a base pointer).
  // FIXME: use a dedicated register for it ?
  Instruction *BasePointer =
      IRB.CreateLoad(UnsafeStackPtr, false, "unsafe_stack_ptr");
  assert(BasePointer->getType() == StackPtrTy);

  for (ReturnInst *RI : Returns) {
    IRB.SetInsertPoint(RI);
    IRB.CreateStore(BasePointer, UnsafeStackPtr);
  }

  // Compute maximum alignment among static objects on the unsafe stack.
  unsigned MaxAlignment = 0;
  for (Argument *Arg : ByValArguments) {
    Type *Ty = Arg->getType()->getPointerElementType();
    unsigned Align = std::max((unsigned)DL->getPrefTypeAlignment(Ty),
                              Arg->getParamAlignment());
    if (Align > MaxAlignment)
      MaxAlignment = Align;
  }
  for (AllocaInst *AI : StaticAllocas) {
    Type *Ty = AI->getAllocatedType();
    unsigned Align =
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment());
    if (Align > MaxAlignment)
      MaxAlignment = Align;
  }

  if (MaxAlignment > StackAlignment) {
    // Re-align the base pointer according to the max requested alignment.
    assert(isPowerOf2_32(MaxAlignment));
    IRB.SetInsertPoint(BasePointer->getNextNode());
    BasePointer = cast<Instruction>(IRB.CreateIntToPtr(
        IRB.CreateAnd(IRB.CreatePtrToInt(BasePointer, IntPtrTy),
                      ConstantInt::get(IntPtrTy, ~uint64_t(MaxAlignment - 1))),
        StackPtrTy));
  }

  int64_t StaticOffset = 0; // Current stack top.
  IRB.SetInsertPoint(BasePointer->getNextNode());

  for (Argument *Arg : ByValArguments) {
    Type *Ty = Arg->getType()->getPointerElementType();

    uint64_t Size = DL->getTypeStoreSize(Ty);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    unsigned Align = std::max((unsigned)DL->getPrefTypeAlignment(Ty),
                              Arg->getParamAlignment());

    // Add alignment.
    // NOTE: we ensure that BasePointer itself is aligned to >= Align.
    StaticOffset += Size;
    StaticOffset = alignTo(StaticOffset, Align);

    Value *Off = IRB.CreateGEP(BasePointer, // BasePointer is i8*
                               ConstantInt::get(Int32Ty, -StaticOffset));
    Value *NewArg = IRB.CreateBitCast(Off, Arg->getType(),
                                      Arg->getName() + ".unsafe-byval");

    // Replace alloc with the new location.
    replaceDbgDeclare(Arg, BasePointer, BasePointer->getNextNode(), DIB,
                      /*Deref=*/true, -StaticOffset);
    Arg->replaceAllUsesWith(NewArg);
    IRB.SetInsertPoint(cast<Instruction>(NewArg)->getNextNode());
    IRB.CreateMemCpy(Off, Arg, Size, Arg->getParamAlignment());
  }

  // Allocate space for every unsafe static AllocaInst on the unsafe stack.
  for (AllocaInst *AI : StaticAllocas) {
    IRB.SetInsertPoint(AI);

    Type *Ty = AI->getAllocatedType();
    uint64_t Size = getStaticAllocaAllocationSize(AI);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    unsigned Align =
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment());

    // Add alignment.
    // NOTE: we ensure that BasePointer itself is aligned to >= Align.
    StaticOffset += Size;
    StaticOffset = alignTo(StaticOffset, Align);

    Value *Off = IRB.CreateGEP(BasePointer, // BasePointer is i8*
                               ConstantInt::get(Int32Ty, -StaticOffset));
    Value *NewAI = IRB.CreateBitCast(Off, AI->getType(), AI->getName());
    if (AI->hasName() && isa<Instruction>(NewAI))
      cast<Instruction>(NewAI)->takeName(AI);

    // Replace alloc with the new location.
    replaceDbgDeclareForAlloca(AI, BasePointer, DIB, /*Deref=*/true,
                               -StaticOffset);
    AI->replaceAllUsesWith(NewAI);
    AI->eraseFromParent();
  }

  // Re-align BasePointer so that our callees would see it aligned as
  // expected.
  // FIXME: no need to update BasePointer in leaf functions.
  StaticOffset = alignTo(StaticOffset, StackAlignment);

  // Update shadow stack pointer in the function epilogue.
  IRB.SetInsertPoint(BasePointer->getNextNode());

  Value *StaticTop =
      IRB.CreateGEP(BasePointer, ConstantInt::get(Int32Ty, -StaticOffset),
                    "unsafe_stack_static_top");
  IRB.CreateStore(StaticTop, UnsafeStackPtr);
  return StaticTop;
}

void SafeStack::moveDynamicAllocasToUnsafeStack(
    Function &F, Value *UnsafeStackPtr, AllocaInst *DynamicTop,
    ArrayRef<AllocaInst *> DynamicAllocas) {
  DIBuilder DIB(*F.getParent());

  for (AllocaInst *AI : DynamicAllocas) {
    IRBuilder<> IRB(AI);

    // Compute the new SP value (after AI).
    Value *ArraySize = AI->getArraySize();
    if (ArraySize->getType() != IntPtrTy)
      ArraySize = IRB.CreateIntCast(ArraySize, IntPtrTy, false);

    Type *Ty = AI->getAllocatedType();
    uint64_t TySize = DL->getTypeAllocSize(Ty);
    Value *Size = IRB.CreateMul(ArraySize, ConstantInt::get(IntPtrTy, TySize));

    Value *SP = IRB.CreatePtrToInt(IRB.CreateLoad(UnsafeStackPtr), IntPtrTy);
    SP = IRB.CreateSub(SP, Size);

    // Align the SP value to satisfy the AllocaInst, type and stack alignments.
    unsigned Align = std::max(
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment()),
        (unsigned)StackAlignment);

    assert(isPowerOf2_32(Align));
    Value *NewTop = IRB.CreateIntToPtr(
        IRB.CreateAnd(SP, ConstantInt::get(IntPtrTy, ~uint64_t(Align - 1))),
        StackPtrTy);

    // Save the stack pointer.
    IRB.CreateStore(NewTop, UnsafeStackPtr);
    if (DynamicTop)
      IRB.CreateStore(NewTop, DynamicTop);

    Value *NewAI = IRB.CreatePointerCast(NewTop, AI->getType());
    if (AI->hasName() && isa<Instruction>(NewAI))
      NewAI->takeName(AI);

    replaceDbgDeclareForAlloca(AI, NewAI, DIB, /*Deref=*/true);
    AI->replaceAllUsesWith(NewAI);
    AI->eraseFromParent();
  }

  if (!DynamicAllocas.empty()) {
    // Now go through the instructions again, replacing stacksave/stackrestore.
    for (inst_iterator It = inst_begin(&F), Ie = inst_end(&F); It != Ie;) {
      Instruction *I = &*(It++);
      auto II = dyn_cast<IntrinsicInst>(I);
      if (!II)
        continue;

      if (II->getIntrinsicID() == Intrinsic::stacksave) {
        IRBuilder<> IRB(II);
        Instruction *LI = IRB.CreateLoad(UnsafeStackPtr);
        LI->takeName(II);
        II->replaceAllUsesWith(LI);
        II->eraseFromParent();
      } else if (II->getIntrinsicID() == Intrinsic::stackrestore) {
        IRBuilder<> IRB(II);
        Instruction *SI = IRB.CreateStore(II->getArgOperand(0), UnsafeStackPtr);
        SI->takeName(II);
        assert(II->use_empty());
        II->eraseFromParent();
      }
    }
  }
}

bool SafeStack::runOnFunction(Function &F) {
  DEBUG(dbgs() << "[SafeStack] Function: " << F.getName() << "\n");

  if (!F.hasFnAttribute(Attribute::SafeStack)) {
    DEBUG(dbgs() << "[SafeStack] safestack is not requested"
                    " for this function\n");
    return false;
  }

  if (F.isDeclaration()) {
    DEBUG(dbgs() << "[SafeStack] function definition"
                    " is not available\n");
    return false;
  }

  TL = TM ? TM->getSubtargetImpl(F)->getTargetLowering() : nullptr;
  SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE();

  {
    // Make sure the regular stack protector won't run on this function
    // (safestack attribute takes precedence).
    AttrBuilder B;
    B.addAttribute(Attribute::StackProtect)
        .addAttribute(Attribute::StackProtectReq)
        .addAttribute(Attribute::StackProtectStrong);
    F.removeAttributes(
        AttributeSet::FunctionIndex,
        AttributeSet::get(F.getContext(), AttributeSet::FunctionIndex, B));
  }

  ++NumFunctions;

  SmallVector<AllocaInst *, 16> StaticAllocas;
  SmallVector<AllocaInst *, 4> DynamicAllocas;
  SmallVector<Argument *, 4> ByValArguments;
  SmallVector<ReturnInst *, 4> Returns;

  // Collect all points where the stack gets unwound and needs to be restored.
  // This is only necessary because the runtime (setjmp and unwind code) is
  // not aware of the unsafe stack and won't unwind/restore it properly.
  // To work around this problem without changing the runtime, we insert
  // instrumentation to restore the unsafe stack pointer when necessary.
  SmallVector<Instruction *, 4> StackRestorePoints;

  // Find all static and dynamic alloca instructions that must be moved to the
  // unsafe stack, all return instructions and stack restore points.
  findInsts(F, StaticAllocas, DynamicAllocas, ByValArguments, Returns,
            StackRestorePoints);

  if (StaticAllocas.empty() && DynamicAllocas.empty() &&
      ByValArguments.empty() && StackRestorePoints.empty())
    return false; // Nothing to do in this function.

  if (!StaticAllocas.empty() || !DynamicAllocas.empty() ||
      !ByValArguments.empty())
    ++NumUnsafeStackFunctions; // This function has the unsafe stack.

  if (!StackRestorePoints.empty())
    ++NumUnsafeStackRestorePointsFunctions;

  IRBuilder<> IRB(&F.front(), F.begin()->getFirstInsertionPt());
  UnsafeStackPtr = getOrCreateUnsafeStackPtr(IRB, F);

  // The top of the unsafe stack after all unsafe static allocas are allocated.
  Value *StaticTop = moveStaticAllocasToUnsafeStack(IRB, F, StaticAllocas,
                                                    ByValArguments, Returns);

  // Safe stack object that stores the current unsafe stack top. It is updated
  // as unsafe dynamic (non-constant-sized) allocas are allocated and freed.
  // This is only needed if we need to restore stack pointer after longjmp
  // or exceptions, and we have dynamic allocations.
  // FIXME: a better alternative might be to store the unsafe stack pointer
  // before setjmp / invoke instructions.
  AllocaInst *DynamicTop = createStackRestorePoints(
      IRB, F, StackRestorePoints, StaticTop, !DynamicAllocas.empty());

  // Handle dynamic allocas.
  moveDynamicAllocasToUnsafeStack(F, UnsafeStackPtr, DynamicTop,
                                  DynamicAllocas);

  DEBUG(dbgs() << "[SafeStack] safestack applied\n");
  return true;
}

} // anonymous namespace

char SafeStack::ID = 0;
INITIALIZE_TM_PASS_BEGIN(SafeStack, "safe-stack",
                         "Safe Stack instrumentation pass", false, false)
INITIALIZE_TM_PASS_END(SafeStack, "safe-stack",
                       "Safe Stack instrumentation pass", false, false)

FunctionPass *llvm::createSafeStackPass(const llvm::TargetMachine *TM) {
  return new SafeStack(TM);
}
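To make the range check in IsAccessSafe concrete: consider a 16-byte alloca, say int buf[4], accessed as buf[i] where SCEV knows i to be in [0,4). A standalone sketch of the same ConstantRange arithmetic (illustrative only, not part of the commit):

    #include "llvm/ADT/APInt.h"
    #include "llvm/IR/ConstantRange.h"
    #include <cassert>

    using namespace llvm;

    int main() {
      const unsigned BW = 64; // pointer width, assumed 64 bits here
      // After AllocaOffsetRewriter replaces the alloca with zero, the access
      // start offset is 4*i with i in [0,4), i.e. the unsigned range [0,13).
      ConstantRange AccessStart(APInt(BW, 0), APInt(BW, 13));
      // A 4-byte load/store contributes SizeRange = [0, AccessSize) = [0,4).
      ConstantRange SizeRange(APInt(BW, 0), APInt(BW, 4));
      ConstantRange AccessRange = AccessStart.add(SizeRange); // [0,16)
      ConstantRange AllocaRange(APInt(BW, 0), APInt(BW, 16)); // [0, AllocaSize)
      // Every accessed byte stays inside the alloca, so this use alone would
      // not force buf off the safe stack.
      assert(AllocaRange.contains(AccessRange));
      return 0;
    }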
lib/Transforms/Instrumentation/CMakeLists.txt:

   Instrumentation.cpp
   InstrProfiling.cpp
   PGOInstrumentation.cpp
-  SafeStack.cpp
   SanitizerCoverage.cpp
   ThreadSanitizer.cpp

lib/Transforms/Instrumentation/Instrumentation.cpp:

   initializeThreadSanitizerPass(Registry);
   initializeSanitizerCoverageModulePass(Registry);
   initializeDataFlowSanitizerPass(Registry);
-  initializeSafeStackPass(Registry);
 }

 /// LLVMInitializeInstrumentation - C binding for
lib/Transforms/Instrumentation/SafeStack.cpp (deleted, +0 -760):

The deleted file is the same SafeStack.cpp source shown above under lib/CodeGen/SafeStack.cpp; the only differences are in the include block, which in the old copy began with #include "llvm/Transforms/Instrumentation.h" and listed the remaining headers in a slightly different order. The 760 removed lines are not repeated here.
tools/opt/opt.cpp:

   initializeRewriteSymbolsPass(Registry);
   initializeWinEHPreparePass(Registry);
   initializeDwarfEHPreparePass(Registry);
+  initializeSafeStackPass(Registry);
   initializeSjLjEHPreparePass(Registry);

 #ifdef LINK_POLLY_INTO_TOOLS
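With the pass registered in opt (hunk above), it can be exercised on its own against IR whose functions carry the safestack attribute, e.g.:

    opt -safe-stack -S input.ll -o output.ll

(The "safe-stack" flag name comes from the INITIALIZE_TM_PASS strings in SafeStack.cpp. Invoked this way, opt default-constructs the pass with TM == nullptr, so getOrCreateUnsafeStackPtr falls back to the generic __safestack_unsafe_stack_ptr thread-local variable rather than a target-specific location.)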