llvm.org GIT mirror llvm / 5a130c5
InferAddressSpaces: Move target intrinsic handling to TTI. I'm planning on handling intrinsics that will benefit from checking the address space enums. Don't bother moving the address collection for now, since those won't need the enums. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@368895 91177308-0d34-0410-b5e6-96231b3b80d8 Matt Arsenault a month ago
7 changed file(s) with 121 addition(s) and 26 deletion(s). Raw diff Collapse all Expand all
366366 /// \returns ~0u if the target does not have such a flat address space to
367367 /// optimize away.
368368 unsigned getFlatAddressSpace() const;
369
370 /// Return any intrinsic address operand indexes which may be rewritten if
371 /// they use a flat address space pointer.
372 ///
373 /// \returns true if the intrinsic was handled.
374 bool collectFlatAddressOperands(SmallVectorImpl &OpIndexes,
375 Intrinsic::ID IID) const;
376
377 /// Rewrite intrinsic call \p II such that \p OldV will be replaced with \p
378 /// NewV, which has a different address space. This should happen for every
379 /// operand index that collectFlatAddressOperands returned for the intrinsic.
380 /// \returns true if the intrinsic /// was handled.
381 bool rewriteIntrinsicWithAddressSpace(IntrinsicInst *II,
382 Value *OldV, Value *NewV) const;
369383
370384 /// Test whether calls to a function lower to actual program function
371385 /// calls.
11591173 virtual bool isSourceOfDivergence(const Value *V) = 0;
11601174 virtual bool isAlwaysUniform(const Value *V) = 0;
11611175 virtual unsigned getFlatAddressSpace() = 0;
1176 virtual bool collectFlatAddressOperands(SmallVectorImpl &OpIndexes,
1177 Intrinsic::ID IID) const = 0;
1178 virtual bool rewriteIntrinsicWithAddressSpace(
1179 IntrinsicInst *II, Value *OldV, Value *NewV) const = 0;
11621180 virtual bool isLoweredToCall(const Function *F) = 0;
11631181 virtual void getUnrollingPreferences(Loop *L, ScalarEvolution &,
11641182 UnrollingPreferences &UP) = 0;
13991417 return Impl.getFlatAddressSpace();
14001418 }
14011419
1420 bool collectFlatAddressOperands(SmallVectorImpl &OpIndexes,
1421 Intrinsic::ID IID) const override {
1422 return Impl.collectFlatAddressOperands(OpIndexes, IID);
1423 }
1424
1425 bool rewriteIntrinsicWithAddressSpace(
1426 IntrinsicInst *II, Value *OldV, Value *NewV) const override {
1427 return Impl.rewriteIntrinsicWithAddressSpace(II, OldV, NewV);
1428 }
1429
14021430 bool isLoweredToCall(const Function *F) override {
14031431 return Impl.isLoweredToCall(F);
14041432 }
153153
154154 unsigned getFlatAddressSpace () {
155155 return -1;
156 }
157
158 bool collectFlatAddressOperands(SmallVectorImpl &OpIndexes,
159 Intrinsic::ID IID) const {
160 return false;
161 }
162
163 bool rewriteIntrinsicWithAddressSpace(IntrinsicInst *II,
164 Value *OldV, Value *NewV) const {
165 return false;
156166 }
157167
158168 bool isLoweredToCall(const Function *F) {
212212 unsigned getFlatAddressSpace() {
213213 // Return an invalid address space.
214214 return -1;
215 }
216
217 bool collectFlatAddressOperands(SmallVectorImpl &OpIndexes,
218 Intrinsic::ID IID) const {
219 return false;
220 }
221
222 bool rewriteIntrinsicWithAddressSpace(IntrinsicInst *II,
223 Value *OldV, Value *NewV) const {
224 return false;
215225 }
216226
217227 bool isLegalAddImmediate(int64_t imm) {
224224
225225 unsigned TargetTransformInfo::getFlatAddressSpace() const {
226226 return TTIImpl->getFlatAddressSpace();
227 }
228
229 bool TargetTransformInfo::collectFlatAddressOperands(
230 SmallVectorImpl &OpIndexes, Intrinsic::ID IID) const {
231 return TTIImpl->collectFlatAddressOperands(OpIndexes, IID);
232 }
233
234 bool TargetTransformInfo::rewriteIntrinsicWithAddressSpace(
235 IntrinsicInst *II, Value *OldV, Value *NewV) const {
236 return TTIImpl->rewriteIntrinsicWithAddressSpace(II, OldV, NewV);
227237 }
228238
229239 bool TargetTransformInfo::isLoweredToCall(const Function *F) const {
589589 return false;
590590 }
591591
592 bool GCNTTIImpl::collectFlatAddressOperands(SmallVectorImpl &OpIndexes,
593 Intrinsic::ID IID) const {
594 switch (IID) {
595 case Intrinsic::amdgcn_atomic_inc:
596 case Intrinsic::amdgcn_atomic_dec:
597 case Intrinsic::amdgcn_ds_fadd:
598 case Intrinsic::amdgcn_ds_fmin:
599 case Intrinsic::amdgcn_ds_fmax:
600 OpIndexes.push_back(0);
601 return true;
602 default:
603 return false;
604 }
605 }
606
607 bool GCNTTIImpl::rewriteIntrinsicWithAddressSpace(
608 IntrinsicInst *II, Value *OldV, Value *NewV) const {
609 switch (II->getIntrinsicID()) {
610 case Intrinsic::amdgcn_atomic_inc:
611 case Intrinsic::amdgcn_atomic_dec:
612 case Intrinsic::amdgcn_ds_fadd:
613 case Intrinsic::amdgcn_ds_fmin:
614 case Intrinsic::amdgcn_ds_fmax: {
615 const ConstantInt *IsVolatile = cast(II->getArgOperand(4));
616 if (!IsVolatile->isZero())
617 return false;
618 Module *M = II->getParent()->getParent()->getParent();
619 Type *DestTy = II->getType();
620 Type *SrcTy = NewV->getType();
621 Function *NewDecl =
622 Intrinsic::getDeclaration(M, II->getIntrinsicID(), {DestTy, SrcTy});
623 II->setArgOperand(0, NewV);
624 II->setCalledFunction(NewDecl);
625 return true;
626 }
627 default:
628 return false;
629 }
630 }
631
592632 unsigned GCNTTIImpl::getShuffleCost(TTI::ShuffleKind Kind, Type *Tp, int Index,
593633 Type *SubTp) {
594634 if (ST->hasVOP3PInsts()) {
181181 return -1;
182182 return AMDGPUAS::FLAT_ADDRESS;
183183 }
184
185 bool collectFlatAddressOperands(SmallVectorImpl &OpIndexes,
186 Intrinsic::ID IID) const;
187 bool rewriteIntrinsicWithAddressSpace(IntrinsicInst *II,
188 Value *OldV, Value *NewV) const;
184189
185190 unsigned getVectorSplitCost() { return 0; }
186191
140140
141141 /// InferAddressSpaces
142142 class InferAddressSpaces : public FunctionPass {
143 const TargetTransformInfo *TTI;
144
143145 /// Target specific address space which uses of should be replaced if
144146 /// possible.
145147 unsigned FlatAddrSpace;
263265 Module *M = II->getParent()->getParent()->getParent();
264266
265267 switch (II->getIntrinsicID()) {
266 case Intrinsic::amdgcn_atomic_inc:
267 case Intrinsic::amdgcn_atomic_dec:
268 case Intrinsic::amdgcn_ds_fadd:
269 case Intrinsic::amdgcn_ds_fmin:
270 case Intrinsic::amdgcn_ds_fmax: {
271 const ConstantInt *IsVolatile = cast(II->getArgOperand(4));
272 if (!IsVolatile->isZero())
273 return false;
274
275 LLVM_FALLTHROUGH;
276 }
277268 case Intrinsic::objectsize: {
278269 Type *DestTy = II->getType();
279270 Type *SrcTy = NewV->getType();
284275 return true;
285276 }
286277 default:
287 return false;
288 }
289 }
290
291 // TODO: Move logic to TTI?
278 return TTI->rewriteIntrinsicWithAddressSpace(II, OldV, NewV);
279 }
280 }
281
292282 void InferAddressSpaces::collectRewritableIntrinsicOperands(
293283 IntrinsicInst *II, std::vector> &PostorderStack,
294284 DenseSet &Visited) const {
295 switch (II->getIntrinsicID()) {
285 auto IID = II->getIntrinsicID();
286 switch (IID) {
296287 case Intrinsic::objectsize:
297 case Intrinsic::amdgcn_atomic_inc:
298 case Intrinsic::amdgcn_atomic_dec:
299 case Intrinsic::amdgcn_ds_fadd:
300 case Intrinsic::amdgcn_ds_fmin:
301 case Intrinsic::amdgcn_ds_fmax:
302288 appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(0),
303289 PostorderStack, Visited);
304290 break;
305291 default:
292 SmallVector OpIndexes;
293 if (TTI->collectFlatAddressOperands(OpIndexes, IID)) {
294 for (int Idx : OpIndexes) {
295 appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(Idx),
296 PostorderStack, Visited);
297 }
298 }
306299 break;
307300 }
308301 }
630623 if (skipFunction(F))
631624 return false;
632625
633 const TargetTransformInfo &TTI =
634 getAnalysis().getTTI(F);
626 TTI = &getAnalysis().getTTI(F);
635627
636628 if (FlatAddrSpace == UninitializedAddressSpace) {
637 FlatAddrSpace = TTI.getFlatAddressSpace();
629 FlatAddrSpace = TTI->getFlatAddressSpace();
638630 if (FlatAddrSpace == UninitializedAddressSpace)
639631 return false;
640632 }
649641
650642 // Changes the address spaces of the flat address expressions who are inferred
651643 // to point to a specific address space.
652 return rewriteWithNewAddressSpaces(TTI, Postorder, InferredAddrSpace, &F);
644 return rewriteWithNewAddressSpaces(*TTI, Postorder, InferredAddrSpace, &F);
653645 }
654646
655647 // Constants need to be tracked through RAUW to handle cases with nested