llvm.org GIT mirror: llvm / commit 1a7750e
[SSP] Remove llvm.stackprotectorcheck.

This is a cleanup patch for SSP support in LLVM. There is no functional change. llvm.stackprotectorcheck is not needed, because SelectionDAG does not actually lower it in SelectBasicBlock; rather, it adds the check code in FinishBasicBlock, ignoring the position at which the intrinsic was inserted (see FindSplitPointForStackProtector()).

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@265851 91177308-0d34-0410-b5e6-96231b3b80d8

Tim Shen, 3 years ago
15 changed files with 150 additions and 183 deletions.
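In outline, the replacement mechanism works as follows. The sketch below is condensed from the SelectionDAGISel.cpp and SelectionDAGBuilder.cpp hunks further down; it stitches lines from two different functions together and is not a compilable excerpt:

    // 1. The StackProtector IR pass still emits the llvm.stackprotector
    //    prologue and records two flags: HasPrologue and HasIRCheck.
    // 2. Per basic block, instruction selection now asks the analysis whether
    //    a SelectionDAG check is wanted, rather than looking for a
    //    llvm.stackprotectorcheck call at a particular position:
    if (getAnalysis<StackProtector>().shouldEmitSDCheck(*LLVMBB))
      SDB->SPDescriptor.initialize(LLVMBB, FuncInfo->MBBMap[LLVMBB]);
    // 3. FinishBasicBlock later splits the block and emits the
    //    compare-and-branch, fetching the guard from the target instead of
    //    from the intrinsic's operand:
    const Value *IRGuard = TLI.getSDStackGuard(M);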
docs/LangRef.rst:

 different, then ``llvm.stackprotectorcheck`` causes the program to abort by
 calling the ``__stack_chk_fail()`` function.
 
-'``llvm.stackprotectorcheck``' Intrinsic
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Syntax:
-"""""""
-
-::
-
-      declare void @llvm.stackprotectorcheck(i8** <guard>)
-
-Overview:
-"""""""""
-
-The ``llvm.stackprotectorcheck`` intrinsic compares ``guard`` against an
-already created stack protector and, if they are not equal, calls the
-``__stack_chk_fail()`` function.
-
-Arguments:
-""""""""""
-
-The ``llvm.stackprotectorcheck`` intrinsic requires one pointer argument, the
-variable ``@__stack_chk_guard``.
-
-Semantics:
-""""""""""
-
-This intrinsic is provided to perform the stack protector check by comparing
-``guard`` with the stack slot created by ``llvm.stackprotector`` and, if the
-values do not match, calling the ``__stack_chk_fail()`` function.
-
-The reason to provide this as an IR-level intrinsic instead of implementing it
-via other IR operations is that, in order to perform this operation at the IR
-level without an intrinsic, one would need to create additional basic blocks
-to handle the success/failure cases. This makes it difficult to stop the stack
-protector check from disrupting sibling tail calls in Codegen. With this
-intrinsic, we are able to generate the stack protector basic blocks late in
-codegen, after the tail call decision has occurred.
-
 '``llvm.objectsize``' Intrinsic
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
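For contrast with the rationale above: under the old scheme the StackProtector IR pass materialized the check itself (the deletion appears in the StackProtector.cpp hunk further down). A two-line sketch of that now-removed emission, where `M`, `StackGuardVar` and `InsertionPt` are the names used in the deleted code:

    // Old scheme: drop the check intrinsic right before the return (or before
    // a candidate tail call) and let SelectionDAG expand it much later.
    Function *Check =
        Intrinsic::getDeclaration(M, Intrinsic::stackprotectorcheck);
    CallInst::Create(Check, StackGuardVar, "", InsertionPt);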
include/llvm/CodeGen/StackProtector.h:

   /// times.
   SmallPtrSet<const PHINode *, 16> VisitedPHIs;
 
+  // A prologue is generated.
+  bool HasPrologue = false;
+
+  // IR checking code is generated.
+  bool HasIRCheck = false;
+
   /// InsertStackProtectors - Insert code into the prologue and epilogue of
   /// the function.
   ///
...
   }
 
   SSPLayoutKind getSSPLayout(const AllocaInst *AI) const;
+
+  // Return true if StackProtector is supposed to be handled by SelectionDAG.
+  bool shouldEmitSDCheck(const BasicBlock &BB) const;
+
   void adjustForColoring(const AllocaInst *From, const AllocaInst *To);
 
   bool runOnFunction(Function &Fn) override;
include/llvm/IR/Intrinsics.td:

 // Stack Protector Intrinsic - The stackprotector intrinsic writes the stack
 // guard to the correct place on the stack frame.
 def int_stackprotector : Intrinsic<[], [llvm_ptr_ty, llvm_ptrptr_ty], []>;
-def int_stackprotectorcheck : Intrinsic<[], [llvm_ptrptr_ty],
-                                        [IntrReadWriteArgMem]>;
 
 // A counter increment for instrumentation based profiling.
 def int_instrprof_increment : Intrinsic<[],
include/llvm/Target/TargetLowering.h:

     return PrefLoopAlignment;
   }
 
-  /// If the target has a standard location for the stack protector cookie,
+  /// If the target has a standard location for the stack protector guard,
   /// returns the address of that location. Otherwise, returns nullptr.
-  virtual Value *getStackCookieLocation(IRBuilder<> &IRB) const {
-    return nullptr;
-  }
+  virtual Value *getIRStackGuard(IRBuilder<> &IRB) const;
+
+  /// Inserts necessary declarations for SSP purpose. Should be used only when
+  /// getIRStackGuard returns nullptr.
+  virtual void insertSSPDeclarations(Module &M) const;
+
+  /// Return the variable that's previously inserted by insertSSPDeclarations,
+  /// if any, otherwise return nullptr. Should be used only when
+  /// getIRStackGuard returns nullptr.
+  virtual Value *getSDStackGuard(const Module &M) const;
 
   /// If the target has a standard location for the unsafe stack pointer,
   /// returns the address of that location. Otherwise, returns nullptr.
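The contract of the new hooks is easiest to see from a target's point of view. Below is a minimal sketch of the override pattern using a hypothetical MyTargetLowering (the real instances are the X86 and AArch64 hunks later in this diff); the bodies shown for the two SelectionDAG-path hooks mirror the TargetLoweringBase defaults:

    class MyTargetLowering : public TargetLowering {
    public:
      // IR path: return the guard's address (e.g. a fixed TLS slot) so the
      // StackProtector IR pass can load it directly. A non-null result means
      // the two SelectionDAG-path hooks below are never consulted.
      Value *getIRStackGuard(IRBuilder<> &IRB) const override {
        return nullptr; // this target has no special slot: use the SD path
      }
      // SelectionDAG path, step 1: declare the guard symbol in the module...
      void insertSSPDeclarations(Module &M) const override {
        M.getOrInsertGlobal("__stack_chk_guard",
                            Type::getInt8PtrTy(M.getContext()));
      }
      // ...step 2: hand the declared symbol back to SelectionDAGBuilder.
      Value *getSDStackGuard(const Module &M) const override {
        return M.getGlobalVariable("__stack_chk_guard");
      }
    };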
lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp:

   MachineFrameInfo *MFI = ParentBB->getParent()->getFrameInfo();
   int FI = MFI->getStackProtectorIndex();
 
-  const Value *IRGuard = SPD.getGuard();
+  const Module &M = *ParentBB->getParent()->getFunction()->getParent();
+  const Value *IRGuard = TLI.getSDStackGuard(M);
+  assert(IRGuard && "Currently there must be an IR guard in order to use "
+                    "SelectionDAG SSP");
   SDValue GuardPtr = getValue(IRGuard);
   SDValue StackSlotPtr = DAG.getFrameIndex(FI, PtrTy);
 
...
   case Intrinsic::invariant_end:
     // Discard region information.
     return nullptr;
-  case Intrinsic::stackprotectorcheck: {
-    // Do not actually emit anything for this basic block. Instead we
-    // initialize the stack protector descriptor and export the guard
-    // variable so we can access it in FinishBasicBlock.
-    const BasicBlock *BB = I.getParent();
-    SPDescriptor.initialize(BB, FuncInfo.MBBMap[BB], I);
-    ExportFromCurrentBlock(SPDescriptor.getGuard());
-
-    // Flush our exports since we are going to process a terminator.
-    (void)getControlRoot();
-    return nullptr;
-  }
   case Intrinsic::clear_cache:
     return TLI.getClearCacheBuiltinName();
   case Intrinsic::donothing:
lib/CodeGen/SelectionDAG/SelectionDAGBuilder.h:

 /// the same function, use the same failure basic block).
 class StackProtectorDescriptor {
 public:
-  StackProtectorDescriptor() : ParentMBB(nullptr), SuccessMBB(nullptr),
-                               FailureMBB(nullptr), Guard(nullptr),
-                               GuardReg(0) { }
+  StackProtectorDescriptor()
+      : ParentMBB(nullptr), SuccessMBB(nullptr), FailureMBB(nullptr),
+        GuardReg(0) {}
 
   /// Returns true if all fields of the stack protector descriptor are
   /// initialized implying that we should/are ready to emit a stack protector.
   bool shouldEmitStackProtector() const {
-    return ParentMBB && SuccessMBB && FailureMBB && Guard;
+    return ParentMBB && SuccessMBB && FailureMBB;
   }
 
   /// Initialize the stack protector descriptor structure for a new basic
   /// block.
-  void initialize(const BasicBlock *BB,
-                  MachineBasicBlock *MBB,
-                  const CallInst &StackProtCheckCall) {
+  void initialize(const BasicBlock *BB, MachineBasicBlock *MBB) {
     // Make sure we are not initialized yet.
     assert(!shouldEmitStackProtector() && "Stack Protector Descriptor is "
            "already initialized!");
     ParentMBB = MBB;
     SuccessMBB = AddSuccessorMBB(BB, MBB, /* IsLikely */ true);
     FailureMBB = AddSuccessorMBB(BB, MBB, /* IsLikely */ false, FailureMBB);
-    if (!Guard)
-      Guard = StackProtCheckCall.getArgOperand(0);
   }
 
   /// Reset state that changes when we handle different basic blocks.
...
   /// always the same.
   void resetPerFunctionState() {
     FailureMBB = nullptr;
-    Guard = nullptr;
     GuardReg = 0;
   }
 
   MachineBasicBlock *getParentMBB() { return ParentMBB; }
   MachineBasicBlock *getSuccessMBB() { return SuccessMBB; }
   MachineBasicBlock *getFailureMBB() { return FailureMBB; }
-  const Value *getGuard() { return Guard; }
 
   unsigned getGuardReg() const { return GuardReg; }
   void setGuardReg(unsigned R) { GuardReg = R; }
...
   /// This basic block visited on stack protector check failure that will
   /// contain a call to __stack_chk_fail().
   MachineBasicBlock *FailureMBB;
-
-  /// The guard variable which we will compare against the stored value in the
-  /// stack protector stack slot.
-  const Value *Guard;
 
   /// The virtual register holding the stack guard value.
   unsigned GuardReg;
lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp:

 //
 //===----------------------------------------------------------------------===//
 
-#include "llvm/CodeGen/GCStrategy.h"
+#include "llvm/CodeGen/SelectionDAG.h"
 #include "ScheduleDAGSDNodes.h"
 #include "SelectionDAGBuilder.h"
 #include "llvm/ADT/PostOrderIterator.h"
...
 #include "llvm/CodeGen/FastISel.h"
 #include "llvm/CodeGen/FunctionLoweringInfo.h"
 #include "llvm/CodeGen/GCMetadata.h"
+#include "llvm/CodeGen/GCStrategy.h"
 #include "llvm/CodeGen/MachineFrameInfo.h"
 #include "llvm/CodeGen/MachineFunction.h"
 #include "llvm/CodeGen/MachineInstrBuilder.h"
...
 #include "llvm/CodeGen/MachineRegisterInfo.h"
 #include "llvm/CodeGen/ScheduleHazardRecognizer.h"
 #include "llvm/CodeGen/SchedulerRegistry.h"
-#include "llvm/CodeGen/SelectionDAG.h"
 #include "llvm/CodeGen/SelectionDAGISel.h"
+#include "llvm/CodeGen/StackProtector.h"
 #include "llvm/CodeGen/WinEHFuncInfo.h"
 #include "llvm/IR/Constants.h"
 #include "llvm/IR/DebugInfo.h"
...
 void SelectionDAGISel::getAnalysisUsage(AnalysisUsage &AU) const {
   AU.addRequired<AAResultsWrapperPass>();
   AU.addRequired<GCModuleInfo>();
+  AU.addRequired<StackProtector>();
+  AU.addPreserved<StackProtector>();
   AU.addPreserved<GCModuleInfo>();
   AU.addRequired<TargetLibraryInfoWrapperPass>();
   if (UseMBPI && OptLevel != CodeGenOpt::None)
...
       LowerArguments(Fn);
     }
   }
+  if (getAnalysis<StackProtector>().shouldEmitSDCheck(*LLVMBB))
+    SDB->SPDescriptor.initialize(LLVMBB, FuncInfo->MBBMap[LLVMBB]);
 
   if (Begin != BI)
     ++NumDAGBlocks;
lib/CodeGen/StackProtector.cpp:

       getAnalysisIfAvailable<DominatorTreeWrapperPass>();
   DT = DTWP ? &DTWP->getDomTree() : nullptr;
   TLI = TM->getSubtargetImpl(Fn)->getTargetLowering();
+  HasPrologue = false;
+  HasIRCheck = false;
 
   Attribute Attr = Fn.getFnAttribute("stack-protector-buffer-size");
   if (Attr.isStringAttribute() &&
...
 bool StackProtector::RequiresStackProtector() {
   bool Strong = false;
   bool NeedsProtector = false;
+  for (const BasicBlock &BB : *F)
+    for (const Instruction &I : BB)
+      if (const CallInst *CI = dyn_cast<CallInst>(&I))
+        if (CI->getCalledFunction() ==
+            Intrinsic::getDeclaration(F->getParent(),
+                                      Intrinsic::stackprotector))
+          HasPrologue = true;
+
 if (F->hasFnAttribute(Attribute::StackProtectReq)) {
   NeedsProtector = true;
   Strong = true; // Use the same heuristic as strong to determine SSPLayout
 } else if (F->hasFnAttribute(Attribute::StackProtectStrong))
   Strong = true;
+else if (HasPrologue)
+  NeedsProtector = true;
 else if (!F->hasFnAttribute(Attribute::StackProtect))
   return false;
 
...
   return NeedsProtector;
 }
 
-static bool InstructionWillNotHaveChain(const Instruction *I) {
-  return !I->mayHaveSideEffects() && !I->mayReadFromMemory() &&
-         isSafeToSpeculativelyExecute(I);
-}
-
-/// Identify if RI has a previous instruction in the "Tail Position" and return
-/// it. Otherwise return nullptr.
-///
-/// This is based off of the code in llvm::isInTailCallPosition. The difference
-/// is that it inverts the first part of llvm::isInTailCallPosition since
-/// isInTailCallPosition is checking if a call is in a tail call position, and
-/// we are searching for an unknown tail call that might be in the tail call
-/// position. Once we find the call though, the code uses the same refactored
-/// code, returnTypeIsEligibleForTailCall.
-static CallInst *FindPotentialTailCall(BasicBlock *BB, ReturnInst *RI,
-                                       const TargetLoweringBase *TLI) {
-  // Establish a reasonable upper bound on the number of instructions we will
-  // look through to find a tail call.
-  unsigned SearchCounter = 0;
-  const unsigned MaxSearch = 4;
-  bool NoInterposingChain = true;
-
-  for (BasicBlock::reverse_iterator I = std::next(BB->rbegin()), E = BB->rend();
-       I != E && SearchCounter < MaxSearch; ++I) {
-    Instruction *Inst = &*I;
-
-    // Skip over debug intrinsics and do not allow them to affect our MaxSearch
-    // counter.
-    if (isa<DbgInfoIntrinsic>(Inst))
-      continue;
-
-    // If we find a call and the following conditions are satisfied, then we
-    // have found a tail call that satisfies at least the target independent
-    // requirements of a tail call:
-    //
-    // 1. The call site has the tail marker.
-    //
-    // 2. The call site either will not cause the creation of a chain or if a
-    // chain is necessary there are no instructions in between the callsite and
-    // the call which would create an interposing chain.
-    //
-    // 3. The return type of the function does not impede tail call
-    // optimization.
-    if (CallInst *CI = dyn_cast<CallInst>(Inst)) {
-      if (CI->isTailCall() &&
-          (InstructionWillNotHaveChain(CI) || NoInterposingChain) &&
-          returnTypeIsEligibleForTailCall(BB->getParent(), CI, RI, *TLI))
-        return CI;
-    }
-
-    // If we did not find a call, see if we have an instruction that may create
-    // an interposing chain.
-    NoInterposingChain =
-        NoInterposingChain && InstructionWillNotHaveChain(Inst);
-
-    // Increment max search.
-    SearchCounter++;
-  }
-
-  return nullptr;
-}
-
 /// Insert code into the entry block that stores the __stack_chk_guard
 /// variable onto the stack:
 ///
...
 /// Returns true if the platform/triple supports the stackprotectorcreate
 /// pseudo node.
 static bool CreatePrologue(Function *F, Module *M, ReturnInst *RI,
-                           const TargetLoweringBase *TLI, const Triple &TT,
-                           AllocaInst *&AI, Value *&StackGuardVar) {
+                           const TargetLoweringBase *TLI, AllocaInst *&AI,
+                           Value *&StackGuardVar) {
   bool SupportsSelectionDAGSP = false;
+  IRBuilder<> B(&F->getEntryBlock().front());
+
+  StackGuardVar = TLI->getIRStackGuard(B);
+  if (!StackGuardVar) {
+    /// Use SelectionDAG SSP handling, since there isn't an IR guard.
+    SupportsSelectionDAGSP = true;
+    TLI->insertSSPDeclarations(*M);
+    StackGuardVar = TLI->getSDStackGuard(*M);
+  }
+  assert(StackGuardVar && "Must have stack guard available");
+
   PointerType *PtrTy = Type::getInt8PtrTy(RI->getContext());
-  IRBuilder<> B(&F->getEntryBlock().front());
-
-  StackGuardVar = TLI->getStackCookieLocation(B);
-  if (!StackGuardVar) {
-    if (TT.isOSOpenBSD()) {
-      StackGuardVar = M->getOrInsertGlobal("__guard_local", PtrTy);
-      cast<GlobalValue>(StackGuardVar)
-          ->setVisibility(GlobalValue::HiddenVisibility);
-    } else {
-      SupportsSelectionDAGSP = true;
-      StackGuardVar = M->getOrInsertGlobal("__stack_chk_guard", PtrTy);
-    }
-  }
-
   AI = B.CreateAlloca(PtrTy, nullptr, "StackGuardSlot");
   LoadInst *LI = B.CreateLoad(StackGuardVar, "StackGuard");
   B.CreateCall(Intrinsic::getDeclaration(M, Intrinsic::stackprotector),
                {LI, AI});
-
   return SupportsSelectionDAGSP;
 }
 
...
 /// - The epilogue checks the value stored in the prologue against the original
 ///   value. It calls __stack_chk_fail if they differ.
 bool StackProtector::InsertStackProtectors() {
-  bool HasPrologue = false;
   bool SupportsSelectionDAGSP =
       EnableSelectionDAGSP && !TM->Options.EnableFastISel;
   AllocaInst *AI = nullptr; // Place on stack that stores the stack guard.
...
     if (!HasPrologue) {
       HasPrologue = true;
       SupportsSelectionDAGSP &=
-          CreatePrologue(F, M, RI, TLI, Trip, AI, StackGuardVar);
-    }
-
-    if (SupportsSelectionDAGSP) {
-      // Since we have a potential tail call, insert the special stack check
-      // intrinsic.
-      Instruction *InsertionPt = nullptr;
-      if (CallInst *CI = FindPotentialTailCall(BB, RI, TLI)) {
-        InsertionPt = CI;
-      } else {
-        InsertionPt = RI;
-        // At this point we know that BB has a return statement so it *DOES*
-        // have a terminator.
-        assert(InsertionPt != nullptr &&
-               "BB must have a terminator instruction at this point.");
-      }
-
-      Function *Intrinsic =
-          Intrinsic::getDeclaration(M, Intrinsic::stackprotectorcheck);
-      CallInst::Create(Intrinsic, StackGuardVar, "", InsertionPt);
-    } else {
+          CreatePrologue(F, M, RI, TLI, AI, StackGuardVar);
+    }
+
+    if (!SupportsSelectionDAGSP) {
       // If we do not support SelectionDAG based tail calls, generate IR level
       // tail calls.
       //
...
       // merge pass will merge together all of the various BB into one including
       // fail BB generated by the stack protector pseudo instruction.
       BasicBlock *FailBB = CreateFailBB();
+
+      // Set HasIRCheck to true, so that SelectionDAG will not generate its own
+      // version.
+      HasIRCheck = true;
 
       // Split the basic block before the return instruction.
       BasicBlock *NewBB = BB->splitBasicBlock(RI->getIterator(), "SP_return");
...
   B.CreateUnreachable();
   return FailBB;
 }
+
+bool StackProtector::shouldEmitSDCheck(const BasicBlock &BB) const {
+  return HasPrologue && !HasIRCheck && dyn_cast<ReturnInst>(BB.getTerminator());
+}
lib/CodeGen/TargetLoweringBase.cpp:

 
   return true;
 }
+
+//===----------------------------------------------------------------------===//
+//  Stack Protector
+//===----------------------------------------------------------------------===//
+
+// For OpenBSD return its special guard variable. Otherwise return nullptr,
+// so that SelectionDAG can handle SSP.
+Value *TargetLoweringBase::getIRStackGuard(IRBuilder<> &IRB) const {
+  if (getTargetMachine().getTargetTriple().isOSOpenBSD()) {
+    Module &M = *IRB.GetInsertBlock()->getParent()->getParent();
+    PointerType *PtrTy = Type::getInt8PtrTy(M.getContext());
+    auto Guard = cast<GlobalValue>(M.getOrInsertGlobal("__guard_local", PtrTy));
+    Guard->setVisibility(GlobalValue::HiddenVisibility);
+    return Guard;
+  }
+  return nullptr;
+}
+
+// Currently only support "standard" __stack_chk_guard.
+// TODO: add LOAD_STACK_GUARD support.
+void TargetLoweringBase::insertSSPDeclarations(Module &M) const {
+  M.getOrInsertGlobal("__stack_chk_guard", Type::getInt8PtrTy(M.getContext()));
+}
+
+// Currently only support "standard" __stack_chk_guard.
+// TODO: add LOAD_STACK_GUARD support.
+Value *TargetLoweringBase::getSDStackGuard(const Module &M) const {
+  return M.getGlobalVariable("__stack_chk_guard");
+}
lib/IR/AutoUpgrade.cpp:

     }
   }
   break;
+
+  case 's':
+    if (Name == "stackprotectorcheck") {
+      NewFn = nullptr;
+      return true;
+    }
+    break;
 
   case 'x': {
     if (Name.startswith("x86.sse2.pcmpeq.") ||
...
 
     Value *UndefV = UndefValue::get(Op0->getType());
     Rep = Builder.CreateShuffleVector(Op0, UndefV, ConstantVector::get(Idxs));
+  } else if (Name == "llvm.stackprotectorcheck") {
+    Rep = nullptr;
   } else {
     bool PD128 = false, PD256 = false, PS128 = false, PS256 = false;
     if (Name == "llvm.x86.avx.vpermil.pd.256")
...
     }
   }
 
-  CI->replaceAllUsesWith(Rep);
+  if (Rep)
+    CI->replaceAllUsesWith(Rep);
   CI->eraseFromParent();
   return;
 }
lib/Target/AArch64/AArch64ISelLowering.cpp:

   return false;
 }
 
-Value *AArch64TargetLowering::getStackCookieLocation(IRBuilder<> &IRB) const {
+Value *AArch64TargetLowering::getIRStackGuard(IRBuilder<> &IRB) const {
   if (!Subtarget->isTargetAndroid())
-    return TargetLowering::getStackCookieLocation(IRB);
+    return TargetLowering::getIRStackGuard(IRB);
 
   // Android provides a fixed TLS slot for the stack cookie. See the definition
   // of TLS_SLOT_STACK_GUARD in
lib/Target/AArch64/AArch64ISelLowering.h:

 
   /// If the target has a standard location for the stack protector cookie,
   /// returns the address of that location. Otherwise, returns nullptr.
-  Value *getStackCookieLocation(IRBuilder<> &IRB) const override;
+  Value *getIRStackGuard(IRBuilder<> &IRB) const override;
 
   /// If the target has a standard location for the unsafe stack pointer,
   /// returns the address of that location. Otherwise, returns nullptr.
lib/Target/X86/X86ISelLowering.cpp:

   return 256;
 }
 
-Value *X86TargetLowering::getStackCookieLocation(IRBuilder<> &IRB) const {
+Value *X86TargetLowering::getIRStackGuard(IRBuilder<> &IRB) const {
   if (!Subtarget.isTargetLinux())
-    return TargetLowering::getStackCookieLocation(IRB);
+    return TargetLowering::getIRStackGuard(IRB);
 
   // %fs:0x28, unless we're using a Kernel code model, in which case it's %gs:
   // %gs:0x14 on i386
...
   return ConstantExpr::getIntToPtr(
       ConstantInt::get(Type::getInt32Ty(IRB.getContext()), Offset),
       Type::getInt8PtrTy(IRB.getContext())->getPointerTo(AddressSpace));
+}
+
+void X86TargetLowering::insertSSPDeclarations(Module &M) const {
+  if (!Subtarget.isTargetLinux())
+    TargetLowering::insertSSPDeclarations(M);
+  else
+    llvm_unreachable("X86 Linux supports customized IR stack guard load");
+}
+
+Value *X86TargetLowering::getSDStackGuard(const Module &M) const {
+  if (!Subtarget.isTargetLinux())
+    return TargetLowering::getSDStackGuard(M);
+  llvm_unreachable("X86 Linux supports customized IR stack guard load");
 }
 
 Value *X86TargetLowering::getSafeStackPointerLocation(IRBuilder<> &IRB) const {
lib/Target/X86/X86ISelLowering.h:

 
     /// If the target has a standard location for the stack protector cookie,
     /// returns the address of that location. Otherwise, returns nullptr.
-    Value *getStackCookieLocation(IRBuilder<> &IRB) const override;
+    Value *getIRStackGuard(IRBuilder<> &IRB) const override;
+
+    void insertSSPDeclarations(Module &M) const override;
+
+    Value *getSDStackGuard(const Module &M) const override;
 
     /// Return true if the target stores SafeStack pointer at a fixed offset in
     /// some non-standard address space, and populates the address space and
LLVM IR auto-upgrade test (likely test/Assembler/auto_upgrade_intrinsics.ll):

 define i32 @test.objectsize() {
 ; CHECK-LABEL: @test.objectsize(
 ; CHECK: @llvm.objectsize.i32.p0i8
-; CHECK-DAG: declare i32 @llvm.objectsize.i32.p0i8
   %s = call i32 @llvm.objectsize.i32(i8* getelementptr inbounds ([60 x i8], [60 x i8]* @a, i32 0, i32 0), i1 false)
   ret i32 %s
 }
+
+@__stack_chk_guard = external global i8*
+declare void @llvm.stackprotectorcheck(i8**)
+
+define void @test.stackprotectorcheck() {
+; CHECK-LABEL: @test.stackprotectorcheck(
+; CHECK-NEXT: ret void
+  call void @llvm.stackprotectorcheck(i8** @__stack_chk_guard)
+  ret void
+}
+
+; This check belongs to @test.objectsize(), since the llvm.objectsize
+; declaration gets emitted at the end of the module.
+; CHECK: declare i32 @llvm.objectsize.i32.p0i8