llvm.org GIT mirror llvm / cb7ead2
[Allocator] Switch the BumpPtrAllocator to use a vector of pointers to slabs rather than embedding a singly linked list in the slabs themselves. This has a few advantages: - Better utilization of the slab's memory by not wasting 16-bytes at the front. - Simpler allocation strategy by not having a struct packed at the front. - Avoids paging every allocated slab in just to traverse them for deallocating or dumping stats. The latter is the really nice part. Folks have complained from time to time bitterly that tearing down a BumpPtrAllocator, even if it doesn't run any destructors, pages in all of the memory allocated. Now it won't. =] Also resolves a FIXME with the scaling of the slab sizes. The scaling now disregards specially sized slabs for allocations larger than the threshold. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@206147 91177308-0d34-0410-b5e6-96231b3b80d8 Chandler Carruth 6 years ago
4 changed file(s) with 167 addition(s) and 155 deletion(s). Raw diff Collapse all Expand all
1313 #ifndef LLVM_SUPPORT_ALLOCATOR_H
1414 #define LLVM_SUPPORT_ALLOCATOR_H
1515
16 #include "llvm/ADT/SmallVector.h"
1617 #include "llvm/Support/AlignOf.h"
1718 #include "llvm/Support/DataTypes.h"
1819 #include "llvm/Support/MathExtras.h"
5253 void PrintStats() const {}
5354 };
5455
55 /// MemSlab - This structure lives at the beginning of every slab allocated by
56 /// the bump allocator.
57 class MemSlab {
58 public:
59 size_t Size;
60 MemSlab *NextPtr;
61 };
62
6356 /// SlabAllocator - This class can be used to parameterize the underlying
6457 /// allocation strategy for the bump allocator. In particular, this is used
6558 /// by the JIT to allocate contiguous swathes of executable memory. The
6861 class SlabAllocator {
6962 public:
7063 virtual ~SlabAllocator();
71 virtual MemSlab *Allocate(size_t Size) = 0;
72 virtual void Deallocate(MemSlab *Slab) = 0;
64 virtual void *Allocate(size_t Size) = 0;
65 virtual void Deallocate(void *Slab, size_t Size) = 0;
7366 };
7467
7568 /// MallocSlabAllocator - The default slab allocator for the bump allocator
8376 public:
8477 MallocSlabAllocator() : Allocator() {}
8578 virtual ~MallocSlabAllocator();
86 MemSlab *Allocate(size_t Size) override;
87 void Deallocate(MemSlab *Slab) override;
88 };
89
90 /// \brief Non-templated base class for the \c BumpPtrAllocatorImpl template.
91 class BumpPtrAllocatorBase {
92 public:
93 void Deallocate(const void * /*Ptr*/) {}
94 void PrintStats() const;
95
96 /// \brief Returns the total physical memory allocated by this allocator.
97 size_t getTotalMemory() const;
98
99 protected:
100 /// \brief The slab that we are currently allocating into.
101 MemSlab *CurSlab;
102
103 /// \brief How many bytes we've allocated.
104 ///
105 /// Used so that we can compute how much space was wasted.
106 size_t BytesAllocated;
107
108 BumpPtrAllocatorBase() : CurSlab(nullptr), BytesAllocated(0) {}
79 void *Allocate(size_t Size) override;
80 void Deallocate(void *Slab, size_t Size) override;
10981 };
11082
11183 /// \brief Allocate memory in an ever growing pool, as if by bump-pointer.
11991 /// Note that this also has a threshold for forcing allocations above a certain
12092 /// size into their own slab.
12193 template <size_t SlabSize = 4096, size_t SizeThreshold = SlabSize>
122 class BumpPtrAllocatorImpl : public BumpPtrAllocatorBase {
94 class BumpPtrAllocatorImpl {
12395 BumpPtrAllocatorImpl(const BumpPtrAllocatorImpl &) LLVM_DELETED_FUNCTION;
12496 void operator=(const BumpPtrAllocatorImpl &) LLVM_DELETED_FUNCTION;
12597
130102 "allocation.");
131103
132104 BumpPtrAllocatorImpl()
133 : Allocator(DefaultSlabAllocator), NumSlabs(0) {}
105 : CurPtr(nullptr), End(nullptr), BytesAllocated(0),
106 Allocator(DefaultSlabAllocator) {}
134107 BumpPtrAllocatorImpl(SlabAllocator &Allocator)
135 : Allocator(Allocator), NumSlabs(0) {}
136 ~BumpPtrAllocatorImpl() { DeallocateSlabs(CurSlab); }
108 : CurPtr(nullptr), End(nullptr), BytesAllocated(0), Allocator(Allocator) {
109 }
110 ~BumpPtrAllocatorImpl() {
111 DeallocateSlabs(Slabs.begin(), Slabs.end());
112 DeallocateCustomSizedSlabs();
113 }
137114
138115 /// \brief Deallocate all but the first slab and reset the current pointer
139116 /// to the beginning of it, freeing all memory allocated so far.
140117 void Reset() {
141 if (!CurSlab)
118 if (Slabs.empty())
142119 return;
143 DeallocateSlabs(CurSlab->NextPtr);
144 CurSlab->NextPtr = nullptr;
145 CurPtr = (char *)(CurSlab + 1);
146 End = ((char *)CurSlab) + CurSlab->Size;
120
121 // Reset the state.
147122 BytesAllocated = 0;
123 CurPtr = (char *)Slabs.front();
124 End = CurPtr + SlabSize;
125
126 // Deallocate all but the first slab, and all custom-sized slabs.
127 DeallocateSlabs(std::next(Slabs.begin()), Slabs.end());
128 Slabs.erase(std::next(Slabs.begin()), Slabs.end());
129 DeallocateCustomSizedSlabs();
130 CustomSizedSlabs.clear();
148131 }
149132
150133 /// \brief Allocate space at the specified alignment.
151134 void *Allocate(size_t Size, size_t Alignment) {
152 if (!CurSlab) // Start a new slab if we haven't allocated one already.
135 if (!CurPtr) // Start a new slab if we haven't allocated one already.
153136 StartNewSlab();
154137
155138 // Keep track of how many bytes we've allocated.
173156 }
174157
175158 // If Size is really big, allocate a separate slab for it.
176 size_t PaddedSize = Size + sizeof(MemSlab) + Alignment - 1;
159 size_t PaddedSize = Size + Alignment - 1;
177160 if (PaddedSize > SizeThreshold) {
178 ++NumSlabs;
179 MemSlab *NewSlab = Allocator.Allocate(PaddedSize);
180
181 // Put the new slab after the current slab, since we are not allocating
182 // into it.
183 NewSlab->NextPtr = CurSlab->NextPtr;
184 CurSlab->NextPtr = NewSlab;
185
186 Ptr = alignPtr((char *)(NewSlab + 1), Alignment);
187 assert((uintptr_t)Ptr + Size <= (uintptr_t)NewSlab + NewSlab->Size);
161 void *NewSlab = Allocator.Allocate(PaddedSize);
162 CustomSizedSlabs.push_back(std::make_pair(NewSlab, PaddedSize));
163
164 Ptr = alignPtr((char *)NewSlab, Alignment);
165 assert((uintptr_t)Ptr + Size <= (uintptr_t)NewSlab + PaddedSize);
188166 __msan_allocated_memory(Ptr, Size);
189167 return Ptr;
190168 }
216194 return static_cast<T *>(Allocate(Num * EltSize, Alignment));
217195 }
218196
219 size_t GetNumSlabs() const { return NumSlabs; }
197 void Deallocate(const void * /*Ptr*/) {}
198
199 size_t GetNumSlabs() const { return Slabs.size() + CustomSizedSlabs.size(); }
200
201 size_t getTotalMemory() const {
202 size_t TotalMemory = 0;
203 for (auto I = Slabs.begin(), E = Slabs.end(); I != E; ++I)
204 TotalMemory += computeSlabSize(std::distance(Slabs.begin(), I));
205 for (auto &PtrAndSize : CustomSizedSlabs)
206 TotalMemory += PtrAndSize.second;
207 return TotalMemory;
208 }
209
210 void PrintStats() const {
211 // We call out to an external function to actually print the message as the
212 // printing code uses Allocator.h in its implementation.
213 extern void printBumpPtrAllocatorStats(
214 unsigned NumSlabs, size_t BytesAllocated, size_t TotalMemory);
215
216 printBumpPtrAllocatorStats(Slabs.size(), BytesAllocated, getTotalMemory());
217 }
220218
221219 private:
220 /// \brief The current pointer into the current slab.
221 ///
222 /// This points to the next free byte in the slab.
223 char *CurPtr;
224
225 /// \brief The end of the current slab.
226 char *End;
227
228 /// \brief The slabs allocated so far.
229 SmallVector<void *, 4> Slabs;
230
231 /// \brief Custom-sized slabs allocated for too-large allocation requests.
232 SmallVector<std::pair<void *, size_t>, 0> CustomSizedSlabs;
233
234 /// \brief How many bytes we've allocated.
235 ///
236 /// Used so that we can compute how much space was wasted.
237 size_t BytesAllocated;
238
222239 /// \brief The default allocator used if one is not provided.
223240 MallocSlabAllocator DefaultSlabAllocator;
224241
228245 /// changed to use a custom allocator.
229246 SlabAllocator &Allocator;
230247
231 /// \brief The current pointer into the current slab.
232 ///
233 /// This points to the next free byte in the slab.
234 char *CurPtr;
235
236 /// \brief The end of the current slab.
237 char *End;
238
239 /// \brief How many slabs we've allocated.
240 ///
241 /// Used to scale the size of each slab and reduce the number of allocations
242 /// for extremely heavy memory use scenarios.
243 size_t NumSlabs;
244
245 /// \brief Allocate a new slab and move the bump pointers over into the new
246 /// slab, modifying CurPtr and End.
247 void StartNewSlab() {
248 ++NumSlabs;
248 static size_t computeSlabSize(unsigned SlabIdx) {
249249 // Scale the actual allocated slab size based on the number of slabs
250250 // allocated. Every 128 slabs allocated, we double the allocated size to
251251 // reduce allocation frequency, but saturate at multiplying the slab size by
252252 // 2^30.
253 // FIXME: Currently, this count includes special slabs for objects above the
254 // size threshold. That will be fixed in a subsequent commit to make the
255 // growth even more predictable.
256 size_t AllocatedSlabSize =
257 SlabSize * ((size_t)1 << std::min(30, NumSlabs / 128));
258
259 MemSlab *NewSlab = Allocator.Allocate(AllocatedSlabSize);
260 NewSlab->NextPtr = CurSlab;
261 CurSlab = NewSlab;
262 CurPtr = (char *)(CurSlab + 1);
263 End = ((char *)CurSlab) + CurSlab->Size;
264 }
265
266 /// \brief Deallocate all memory slabs after and including this one.
267 void DeallocateSlabs(MemSlab *Slab) {
268 while (Slab) {
269 MemSlab *NextSlab = Slab->NextPtr;
253 return SlabSize * ((size_t)1 << std::min<size_t>(30, SlabIdx / 128));
254 }
255
256 /// \brief Allocate a new slab and move the bump pointers over into the new
257 /// slab, modifying CurPtr and End.
258 void StartNewSlab() {
259 size_t AllocatedSlabSize = computeSlabSize(Slabs.size());
260
261 void *NewSlab = Allocator.Allocate(AllocatedSlabSize);
262 Slabs.push_back(NewSlab);
263 CurPtr = (char *)(NewSlab);
264 End = ((char *)NewSlab) + AllocatedSlabSize;
265 }
266
267 /// \brief Deallocate a sequence of slabs.
268 void DeallocateSlabs(SmallVectorImpl<void *>::iterator I,
269 SmallVectorImpl<void *>::iterator E) {
270 for (; I != E; ++I) {
271 size_t AllocatedSlabSize =
272 computeSlabSize(std::distance(Slabs.begin(), I));
270273 #ifndef NDEBUG
271274 // Poison the memory so stale pointers crash sooner. Note the slab may
272275 // not be writable (e.g. executable JIT memory), so unprotect it first.
273 sys::Memory::setRangeWritable(Slab + 1, Slab->Size - sizeof(MemSlab));
274 memset(Slab + 1, 0xCD, Slab->Size - sizeof(MemSlab));
276 sys::Memory::setRangeWritable(*I, AllocatedSlabSize);
277 memset(*I, 0xCD, AllocatedSlabSize);
275278 #endif
276 Allocator.Deallocate(Slab);
277 Slab = NextSlab;
278 --NumSlabs;
279 Allocator.Deallocate(*I, AllocatedSlabSize);
280 }
281 }
282
283 /// \brief Deallocate all memory for custom sized slabs.
284 void DeallocateCustomSizedSlabs() {
285 for (auto &PtrAndSize : CustomSizedSlabs) {
286 void *Ptr = PtrAndSize.first;
287 #ifndef NDEBUG
288 size_t Size = PtrAndSize.second;
289 // Poison the memory so stale pointers crash sooner. Note the slab may
290 // not be writable (e.g. executable JIT memory), so unprotect it first.
291 sys::Memory::setRangeWritable(Ptr, Size);
292 memset(Ptr, 0xCD, Size);
293 #endif
294 Allocator.Deallocate(Ptr, Size);
279295 }
280296 }
281297
304320 /// current slab and reset the current pointer to the beginning of it, freeing
305321 /// all memory allocated so far.
306322 void DestroyAll() {
307 MemSlab *Slab = Allocator.CurSlab;
308 while (Slab) {
309 char *End = Slab == Allocator.CurSlab ? Allocator.CurPtr
310 : (char *)Slab + Slab->Size;
311 for (char *Ptr = (char *)(Slab + 1); Ptr < End; Ptr += sizeof(T)) {
312 Ptr = alignPtr(Ptr, alignOf());
313 if (Ptr + sizeof(T) <= End)
314 reinterpret_cast(Ptr)->~T();
315 }
316 Slab = Slab->NextPtr;
317 }
323 auto DestroyElements = [](char *Begin, char *End) {
324 assert(Begin == alignPtr(Begin, alignOf<T>()));
325 for (char *Ptr = Begin; Ptr + sizeof(T) <= End; Ptr += sizeof(T))
326 reinterpret_cast<T *>(Ptr)->~T();
327 };
328
329 for (auto I = Allocator.Slabs.begin(), E = Allocator.Slabs.end(); I != E;
330 ++I) {
331 size_t AllocatedSlabSize = BumpPtrAllocator::computeSlabSize(
332 std::distance(Allocator.Slabs.begin(), I));
333 char *Begin = alignPtr((char *)*I, alignOf<T>());
334 char *End = *I == Allocator.Slabs.back() ? Allocator.CurPtr
335 : (char *)*I + AllocatedSlabSize;
336
337 DestroyElements(Begin, End);
338 }
339
340 for (auto &PtrAndSize : Allocator.CustomSizedSlabs) {
341 void *Ptr = PtrAndSize.first;
342 size_t Size = PtrAndSize.second;
343 DestroyElements(alignPtr((char *)Ptr, alignOf<T>()), (char *)Ptr + Size);
344 }
345
318346 Allocator.Reset();
319347 }
320348
321349 /// \brief Allocate space for an array of objects without constructing them.
322350 T *Allocate(size_t num = 1) { return Allocator.Allocate<T>(num); }
351
352 private:
323353 };
324354
325355 } // end namespace llvm
273273 public:
274274 JITSlabAllocator(DefaultJITMemoryManager &jmm) : JMM(jmm) { }
275275 virtual ~JITSlabAllocator() { }
276 MemSlab *Allocate(size_t Size) override;
277 void Deallocate(MemSlab *Slab) override;
276 void *Allocate(size_t Size) override;
277 void Deallocate(void *Slab, size_t Size) override;
278278 };
279279
280280 /// DefaultJITMemoryManager - Manage memory for the JIT code generation.
567567 };
568568 }
569569
570 MemSlab *JITSlabAllocator::Allocate(size_t Size) {
570 void *JITSlabAllocator::Allocate(size_t Size) {
571571 sys::MemoryBlock B = JMM.allocateNewSlab(Size);
572 MemSlab *Slab = (MemSlab*)B.base();
573 Slab->Size = B.size();
574 Slab->NextPtr = 0;
575 return Slab;
576 }
577
578 void JITSlabAllocator::Deallocate(MemSlab *Slab) {
579 sys::MemoryBlock B(Slab, Slab->Size);
572 return B.base();
573 }
574
575 void JITSlabAllocator::Deallocate(void *Slab, size_t Size) {
576 sys::MemoryBlock B(Slab, Size);
580577 sys::Memory::ReleaseRWX(B);
581578 }
582579
2424
2525 MallocSlabAllocator::~MallocSlabAllocator() { }
2626
27 MemSlab *MallocSlabAllocator::Allocate(size_t Size) {
28 MemSlab *Slab = (MemSlab*)Allocator.Allocate(Size, 0);
29 Slab->Size = Size;
30 Slab->NextPtr = nullptr;
31 return Slab;
27 void *MallocSlabAllocator::Allocate(size_t Size) {
28 return Allocator.Allocate(Size, 0);
3229 }
3330
34 void MallocSlabAllocator::Deallocate(MemSlab *Slab) {
31 void MallocSlabAllocator::Deallocate(void *Slab, size_t Size) {
3532 Allocator.Deallocate(Slab);
3633 }
3734
38 void BumpPtrAllocatorBase::PrintStats() const {
39 unsigned NumSlabs = 0;
40 size_t TotalMemory = 0;
41 for (MemSlab *Slab = CurSlab; Slab; Slab = Slab->NextPtr) {
42 TotalMemory += Slab->Size;
43 ++NumSlabs;
44 }
45
35 void printBumpPtrAllocatorStats(unsigned NumSlabs, size_t BytesAllocated,
36 size_t TotalMemory) {
4637 errs() << "\nNumber of memory regions: " << NumSlabs << '\n'
4738 << "Bytes used: " << BytesAllocated << '\n'
4839 << "Bytes allocated: " << TotalMemory << '\n'
4940 << "Bytes wasted: " << (TotalMemory - BytesAllocated)
5041 << " (includes alignment, etc)\n";
51 }
52
53 size_t BumpPtrAllocatorBase::getTotalMemory() const {
54 size_t TotalMemory = 0;
55 for (MemSlab *Slab = CurSlab; Slab; Slab = Slab->NextPtr) {
56 TotalMemory += Slab->Size;
57 }
58 return TotalMemory;
5942 }
6043
6144 void PrintRecyclerStats(size_t Size,
8383 BumpPtrAllocator Alloc;
8484
8585 // Fill the slab right up until the end pointer.
86 Alloc.Allocate(4096 - sizeof(MemSlab), 0);
86 Alloc.Allocate(4096, 0);
8787 EXPECT_EQ(1U, Alloc.GetNumSlabs());
8888
8989 // If we don't allocate a new slab, then we will have overflowed.
102102 // Mock slab allocator that returns slabs aligned on 4096 bytes. There is no
103103 // easy portable way to do this, so this is kind of a hack.
104104 class MockSlabAllocator : public SlabAllocator {
105 MemSlab *LastSlab;
105 size_t LastSlabSize;
106106
107107 public:
108108 virtual ~MockSlabAllocator() { }
109109
110 virtual MemSlab *Allocate(size_t Size) {
110 virtual void *Allocate(size_t Size) {
111111 // Allocate space for the alignment, the slab, and a void* that goes right
112112 // before the slab.
113113 size_t Alignment = 4096;
114114 void *MemBase = malloc(Size + Alignment - 1 + sizeof(void*));
115115
116 // Make the slab.
117 MemSlab *Slab = (MemSlab*)(((uintptr_t)MemBase+sizeof(void*)+Alignment-1) &
118 ~(uintptr_t)(Alignment - 1));
119 Slab->Size = Size;
120 Slab->NextPtr = 0;
116 // Find the slab start.
117 void *Slab = alignPtr((char *)MemBase + sizeof(void *), Alignment);
121118
122119 // Hold a pointer to the base so we can free the whole malloced block.
123120 ((void**)Slab)[-1] = MemBase;
124121
125 LastSlab = Slab;
122 LastSlabSize = Size;
126123 return Slab;
127124 }
128125
129 virtual void Deallocate(MemSlab *Slab) {
126 virtual void Deallocate(void *Slab, size_t Size) {
130127 free(((void**)Slab)[-1]);
131128 }
132129
133 MemSlab *GetLastSlab() {
134 return LastSlab;
135 }
130 size_t GetLastSlabSize() { return LastSlabSize; }
136131 };
137132
138133 // Allocate a large-ish block with a really large alignment so that the
141136 TEST(AllocatorTest, TestBigAlignment) {
142137 MockSlabAllocator SlabAlloc;
143138 BumpPtrAllocator Alloc(SlabAlloc);
144 uintptr_t Ptr = (uintptr_t)Alloc.Allocate(3000, 2048);
145 MemSlab *Slab = SlabAlloc.GetLastSlab();
146 EXPECT_LE(Ptr + 3000, ((uintptr_t)Slab) + Slab->Size);
139
140 // First allocate a tiny bit to ensure we have to re-align things.
141 (void)Alloc.Allocate(1, 0);
142
143 // Now the big chunk with a big alignment.
144 (void)Alloc.Allocate(3000, 2048);
145
146 // We test that the last slab size is not the default 4096 byte slab, but
147 // rather a custom sized slab that is larger.
148 EXPECT_GT(SlabAlloc.GetLastSlabSize(), 4096u);
147149 }
148150
149151 } // anonymous namespace