llvm.org GIT mirror llvm / 368a977
[Allocator] Lift the slab size and size threshold into template parameters rather than runtime parameters. There is only one user of these parameters and they are compile time for that user. Making these compile time seems to better reflect their intended usage as well. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@205143 91177308-0d34-0410-b5e6-96231b3b80d8 Chandler Carruth 6 years ago
4 changed file(s) with 171 addition(s) and 185 deletion(s). Raw diff Collapse all Expand all
1616 #include "llvm/Support/AlignOf.h"
1717 #include "llvm/Support/DataTypes.h"
1818 #include "llvm/Support/MathExtras.h"
19 #include "llvm/Support/Memory.h"
1920 #include
2021 #include
2122 #include
8485 virtual ~MallocSlabAllocator();
8586 MemSlab *Allocate(size_t Size) override;
8687 void Deallocate(MemSlab *Slab) override;
88 };
89
90 /// \brief Non-templated base class for the \c BumpPtrAllocatorImpl template.
91 class BumpPtrAllocatorBase {
92 public:
93 void Deallocate(const void * /*Ptr*/) {}
94 void PrintStats() const;
95
96 /// \brief Returns the total physical memory allocated by this allocator.
97 size_t getTotalMemory() const;
98
99 protected:
100 /// \brief The slab that we are currently allocating into.
101 MemSlab *CurSlab;
102
103 /// \brief How many bytes we've allocated.
104 ///
105 /// Used so that we can compute how much space was wasted.
106 size_t BytesAllocated;
107
108 BumpPtrAllocatorBase() : CurSlab(0), BytesAllocated(0) {}
87109 };
88110
89111 /// \brief Allocate memory in an ever growing pool, as if by bump-pointer.
96118 ///
97119 /// Note that this also has a threshold for forcing allocations above a certain
98120 /// size into their own slab.
99 class BumpPtrAllocator {
100 BumpPtrAllocator(const BumpPtrAllocator &) LLVM_DELETED_FUNCTION;
101 void operator=(const BumpPtrAllocator &) LLVM_DELETED_FUNCTION;
102
103 public:
104 BumpPtrAllocator(size_t size = 4096, size_t threshold = 4096);
105 BumpPtrAllocator(size_t size, size_t threshold, SlabAllocator &allocator);
106 ~BumpPtrAllocator();
121 template
122 class BumpPtrAllocatorImpl : public BumpPtrAllocatorBase {
123 BumpPtrAllocatorImpl(const BumpPtrAllocatorImpl &) LLVM_DELETED_FUNCTION;
124 void operator=(const BumpPtrAllocatorImpl &) LLVM_DELETED_FUNCTION;
125
126 public:
127 static_assert(SizeThreshold <= SlabSize,
128 "The SizeThreshold must be at most the SlabSize to ensure "
129 "that objects larger than a slab go into their own memory "
130 "allocation.");
131
132 BumpPtrAllocatorImpl()
133 : Allocator(DefaultSlabAllocator), NumSlabs(0) {}
134 BumpPtrAllocatorImpl(SlabAllocator &Allocator)
135 : Allocator(Allocator), NumSlabs(0) {}
136 ~BumpPtrAllocatorImpl() { DeallocateSlabs(CurSlab); }
107137
108138 /// \brief Deallocate all but the current slab and reset the current pointer
109139 /// to the beginning of it, freeing all memory allocated so far.
110 void Reset();
140 void Reset() {
141 if (!CurSlab)
142 return;
143 DeallocateSlabs(CurSlab->NextPtr);
144 CurSlab->NextPtr = 0;
145 CurPtr = (char *)(CurSlab + 1);
146 End = ((char *)CurSlab) + CurSlab->Size;
147 BytesAllocated = 0;
148 }
111149
112150 /// \brief Allocate space at the specified alignment.
113 void *Allocate(size_t Size, size_t Alignment);
151 void *Allocate(size_t Size, size_t Alignment) {
152 if (!CurSlab) // Start a new slab if we haven't allocated one already.
153 StartNewSlab();
154
155 // Keep track of how many bytes we've allocated.
156 BytesAllocated += Size;
157
158 // 0-byte alignment means 1-byte alignment.
159 if (Alignment == 0)
160 Alignment = 1;
161
162 // Allocate the aligned space, going forwards from CurPtr.
163 char *Ptr = alignPtr(CurPtr, Alignment);
164
165 // Check if we can hold it.
166 if (Ptr + Size <= End) {
167 CurPtr = Ptr + Size;
168 // Update the allocation point of this memory block in MemorySanitizer.
169 // Without this, MemorySanitizer messages for values originated from here
170 // will point to the allocation of the entire slab.
171 __msan_allocated_memory(Ptr, Size);
172 return Ptr;
173 }
174
175 // If Size is really big, allocate a separate slab for it.
176 size_t PaddedSize = Size + sizeof(MemSlab) + Alignment - 1;
177 if (PaddedSize > SizeThreshold) {
178 ++NumSlabs;
179 MemSlab *NewSlab = Allocator.Allocate(PaddedSize);
180
181 // Put the new slab after the current slab, since we are not allocating
182 // into it.
183 NewSlab->NextPtr = CurSlab->NextPtr;
184 CurSlab->NextPtr = NewSlab;
185
186 Ptr = alignPtr((char *)(NewSlab + 1), Alignment);
187 assert((uintptr_t)Ptr + Size <= (uintptr_t)NewSlab + NewSlab->Size);
188 __msan_allocated_memory(Ptr, Size);
189 return Ptr;
190 }
191
192 // Otherwise, start a new slab and try again.
193 StartNewSlab();
194 Ptr = alignPtr(CurPtr, Alignment);
195 CurPtr = Ptr + Size;
196 assert(CurPtr <= End && "Unable to allocate memory!");
197 __msan_allocated_memory(Ptr, Size);
198 return Ptr;
199 }
114200
115201 /// \brief Allocate space for one object without constructing it.
116202 template T *Allocate() {
130216 return static_cast(Allocate(Num * EltSize, Alignment));
131217 }
132218
133 void Deallocate(const void * /*Ptr*/) {}
134
135219 size_t GetNumSlabs() const { return NumSlabs; }
136220
137 void PrintStats() const;
138
139 /// \brief Returns the total physical memory allocated by this allocator.
140 size_t getTotalMemory() const;
141
142221 private:
143 /// \brief Allocate at least this many bytes of memory in a slab.
144 size_t SlabSize;
145
146 /// \brief Threshold above which allocations to go into a dedicated slab.
147 size_t SizeThreshold;
148
149222 /// \brief The default allocator used if one is not provided.
150223 MallocSlabAllocator DefaultSlabAllocator;
151224
155228 /// changed to use a custom allocator.
156229 SlabAllocator &Allocator;
157230
158 /// \brief The slab that we are currently allocating into.
159 MemSlab *CurSlab;
160
161231 /// \brief The current pointer into the current slab.
162232 ///
163233 /// This points to the next free byte in the slab.
165235
166236 /// \brief The end of the current slab.
167237 char *End;
168
169 /// \brief How many bytes we've allocated.
170 ///
171 /// Used so that we can compute how much space was wasted.
172 size_t BytesAllocated;
173238
174239 /// \brief How many slabs we've allocated.
175240 ///
179244
180245 /// \brief Allocate a new slab and move the bump pointers over into the new
181246 /// slab, modifying CurPtr and End.
182 void StartNewSlab();
247 void StartNewSlab() {
248 ++NumSlabs;
249 // Scale the actual allocated slab size based on the number of slabs
250 // allocated. Every 128 slabs allocated, we double the allocated size to
251 // reduce allocation frequency, but saturate at multiplying the slab size by
252 // 2^30.
253 // FIXME: Currently, this count includes special slabs for objects above the
254 // size threshold. That will be fixed in a subsequent commit to make the
255 // growth even more predictable.
256 size_t AllocatedSlabSize =
257 SlabSize * (1 << std::min(30, NumSlabs / 128));
258
259 MemSlab *NewSlab = Allocator.Allocate(AllocatedSlabSize);
260 NewSlab->NextPtr = CurSlab;
261 CurSlab = NewSlab;
262 CurPtr = (char *)(CurSlab + 1);
263 End = ((char *)CurSlab) + CurSlab->Size;
264 }
183265
184266 /// \brief Deallocate all memory slabs after and including this one.
185 void DeallocateSlabs(MemSlab *Slab);
267 void DeallocateSlabs(MemSlab *Slab) {
268 while (Slab) {
269 MemSlab *NextSlab = Slab->NextPtr;
270 #ifndef NDEBUG
271 // Poison the memory so stale pointers crash sooner. Note we must
272 // preserve the Size and NextPtr fields at the beginning.
273 sys::Memory::setRangeWritable(Slab + 1, Slab->Size - sizeof(MemSlab));
274 memset(Slab + 1, 0xCD, Slab->Size - sizeof(MemSlab));
275 #endif
276 Allocator.Deallocate(Slab);
277 Slab = NextSlab;
278 --NumSlabs;
279 }
280 }
186281
187282 template friend class SpecificBumpPtrAllocator;
188283 };
284
285 /// \brief The standard BumpPtrAllocator which just uses the default template
286 /// parameters.
287 typedef BumpPtrAllocatorImpl<> BumpPtrAllocator;
189288
190289 /// \brief A BumpPtrAllocator that allows only elements of a specific type to be
191290 /// allocated.
196295 BumpPtrAllocator Allocator;
197296
198297 public:
199 SpecificBumpPtrAllocator(size_t size = 4096, size_t threshold = 4096)
200 : Allocator(size, threshold) {}
201 SpecificBumpPtrAllocator(size_t size, size_t threshold,
202 SlabAllocator &allocator)
203 : Allocator(size, threshold, allocator) {}
298 SpecificBumpPtrAllocator() : Allocator() {}
299 SpecificBumpPtrAllocator(SlabAllocator &allocator) : Allocator(allocator) {}
204300
205301 ~SpecificBumpPtrAllocator() { DestroyAll(); }
206302
228324
229325 } // end namespace llvm
230326
231 inline void *operator new(size_t Size, llvm::BumpPtrAllocator &Allocator) {
327 template
328 void *
329 operator new(size_t Size,
330 llvm::BumpPtrAllocatorImpl &Allocator) {
232331 struct S {
233332 char c;
234333 union {
238337 void *P;
239338 } x;
240339 };
241 return Allocator.Allocate(Size, std::min((size_t)llvm::NextPowerOf2(Size),
242 offsetof(S, x)));
340 return Allocator.Allocate(
341 Size, std::min((size_t)llvm::NextPowerOf2(Size), offsetof(S, x)));
243342 }
244343
245 inline void operator delete(void *, llvm::BumpPtrAllocator &) {}
344 template
345 void operator delete(void *,
346 llvm::BumpPtrAllocatorImpl &) {}
246347
247348 #endif // LLVM_SUPPORT_ALLOCATOR_H
313313 // confuse them with the blocks of memory described above.
314314 std::vector CodeSlabs;
315315 JITSlabAllocator BumpSlabAllocator;
316 BumpPtrAllocator StubAllocator;
317 BumpPtrAllocator DataAllocator;
316 BumpPtrAllocatorImpl StubAllocator;
317 BumpPtrAllocatorImpl DataAllocator;
318318
319319 // Circular list of free blocks.
320320 FreeRangeHeader *FreeMemoryList;
589589 #endif
590590 LastSlab(0, 0),
591591 BumpSlabAllocator(*this),
592 StubAllocator(DefaultSlabSize, DefaultSizeThreshold, BumpSlabAllocator),
593 DataAllocator(DefaultSlabSize, DefaultSizeThreshold, BumpSlabAllocator) {
592 StubAllocator(BumpSlabAllocator),
593 DataAllocator(BumpSlabAllocator) {
594594
595595 // Allocate space for code.
596596 sys::MemoryBlock MemBlock = allocateNewSlab(DefaultCodeSlabSize);
2020
2121 namespace llvm {
2222
23 BumpPtrAllocator::BumpPtrAllocator(size_t size, size_t threshold,
24 SlabAllocator &allocator)
25 : SlabSize(size), SizeThreshold(std::min(size, threshold)),
26 Allocator(allocator), CurSlab(0), BytesAllocated(0), NumSlabs(0) {}
27
28 BumpPtrAllocator::BumpPtrAllocator(size_t size, size_t threshold)
29 : SlabSize(size), SizeThreshold(std::min(size, threshold)),
30 Allocator(DefaultSlabAllocator), CurSlab(0), BytesAllocated(0),
31 NumSlabs(0) {}
32
33 BumpPtrAllocator::~BumpPtrAllocator() {
34 DeallocateSlabs(CurSlab);
35 }
36
37 /// StartNewSlab - Allocate a new slab and move the bump pointers over into
38 /// the new slab. Modifies CurPtr and End.
39 void BumpPtrAllocator::StartNewSlab() {
40 ++NumSlabs;
41 // Scale the actual allocated slab size based on the number of slabs
42 // allocated. Every 128 slabs allocated, we double the allocated size to
43 // reduce allocation frequency, but saturate at multiplying the slab size by
44 // 2^30.
45 // FIXME: Currently, this count includes special slabs for objects above the
46 // size threshold. That will be fixed in a subsequent commit to make the
47 // growth even more predictable.
48 size_t AllocatedSlabSize =
49 SlabSize * (1 << std::min(30, NumSlabs / 128));
50
51 MemSlab *NewSlab = Allocator.Allocate(AllocatedSlabSize);
52 NewSlab->NextPtr = CurSlab;
53 CurSlab = NewSlab;
54 CurPtr = (char*)(CurSlab + 1);
55 End = ((char*)CurSlab) + CurSlab->Size;
56 }
57
58 /// DeallocateSlabs - Deallocate all memory slabs after and including this
59 /// one.
60 void BumpPtrAllocator::DeallocateSlabs(MemSlab *Slab) {
61 while (Slab) {
62 MemSlab *NextSlab = Slab->NextPtr;
63 #ifndef NDEBUG
64 // Poison the memory so stale pointers crash sooner. Note we must
65 // preserve the Size and NextPtr fields at the beginning.
66 sys::Memory::setRangeWritable(Slab + 1, Slab->Size - sizeof(MemSlab));
67 memset(Slab + 1, 0xCD, Slab->Size - sizeof(MemSlab));
68 #endif
69 Allocator.Deallocate(Slab);
70 Slab = NextSlab;
71 --NumSlabs;
72 }
73 }
74
75 /// Reset - Deallocate all but the current slab and reset the current pointer
76 /// to the beginning of it, freeing all memory allocated so far.
77 void BumpPtrAllocator::Reset() {
78 if (!CurSlab)
79 return;
80 DeallocateSlabs(CurSlab->NextPtr);
81 CurSlab->NextPtr = 0;
82 CurPtr = (char*)(CurSlab + 1);
83 End = ((char*)CurSlab) + CurSlab->Size;
84 BytesAllocated = 0;
85 }
86
87 /// Allocate - Allocate space at the specified alignment.
88 ///
89 void *BumpPtrAllocator::Allocate(size_t Size, size_t Alignment) {
90 if (!CurSlab) // Start a new slab if we haven't allocated one already.
91 StartNewSlab();
92
93 // Keep track of how many bytes we've allocated.
94 BytesAllocated += Size;
95
96 // 0-byte alignment means 1-byte alignment.
97 if (Alignment == 0) Alignment = 1;
98
99 // Allocate the aligned space, going forwards from CurPtr.
100 char *Ptr = alignPtr(CurPtr, Alignment);
101
102 // Check if we can hold it.
103 if (Ptr + Size <= End) {
104 CurPtr = Ptr + Size;
105 // Update the allocation point of this memory block in MemorySanitizer.
106 // Without this, MemorySanitizer messages for values originated from here
107 // will point to the allocation of the entire slab.
108 __msan_allocated_memory(Ptr, Size);
109 return Ptr;
110 }
111
112 // If Size is really big, allocate a separate slab for it.
113 size_t PaddedSize = Size + sizeof(MemSlab) + Alignment - 1;
114 if (PaddedSize > SizeThreshold) {
115 ++NumSlabs;
116 MemSlab *NewSlab = Allocator.Allocate(PaddedSize);
117
118 // Put the new slab after the current slab, since we are not allocating
119 // into it.
120 NewSlab->NextPtr = CurSlab->NextPtr;
121 CurSlab->NextPtr = NewSlab;
122
123 Ptr = alignPtr((char*)(NewSlab + 1), Alignment);
124 assert((uintptr_t)Ptr + Size <= (uintptr_t)NewSlab + NewSlab->Size);
125 __msan_allocated_memory(Ptr, Size);
126 return Ptr;
127 }
128
129 // Otherwise, start a new slab and try again.
130 StartNewSlab();
131 Ptr = alignPtr(CurPtr, Alignment);
132 CurPtr = Ptr + Size;
133 assert(CurPtr <= End && "Unable to allocate memory!");
134 __msan_allocated_memory(Ptr, Size);
135 return Ptr;
136 }
137
138 size_t BumpPtrAllocator::getTotalMemory() const {
139 size_t TotalMemory = 0;
140 for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
141 TotalMemory += Slab->Size;
142 }
143 return TotalMemory;
144 }
145
146 void BumpPtrAllocator::PrintStats() const {
147 unsigned NumSlabs = 0;
148 size_t TotalMemory = 0;
149 for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
150 TotalMemory += Slab->Size;
151 ++NumSlabs;
152 }
153
154 errs() << "\nNumber of memory regions: " << NumSlabs << '\n'
155 << "Bytes used: " << BytesAllocated << '\n'
156 << "Bytes allocated: " << TotalMemory << '\n'
157 << "Bytes wasted: " << (TotalMemory - BytesAllocated)
158 << " (includes alignment, etc)\n";
159 }
160
16123 SlabAllocator::~SlabAllocator() { }
16224
16325 MallocSlabAllocator::~MallocSlabAllocator() { }
17335 Allocator.Deallocate(Slab);
17436 }
17537
38 void BumpPtrAllocatorBase::PrintStats() const {
39 unsigned NumSlabs = 0;
40 size_t TotalMemory = 0;
41 for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
42 TotalMemory += Slab->Size;
43 ++NumSlabs;
44 }
45
46 errs() << "\nNumber of memory regions: " << NumSlabs << '\n'
47 << "Bytes used: " << BytesAllocated << '\n'
48 << "Bytes allocated: " << TotalMemory << '\n'
49 << "Bytes wasted: " << (TotalMemory - BytesAllocated)
50 << " (includes alignment, etc)\n";
51 }
52
53 size_t BumpPtrAllocatorBase::getTotalMemory() const {
54 size_t TotalMemory = 0;
55 for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
56 TotalMemory += Slab->Size;
57 }
58 return TotalMemory;
59 }
60
17661 void PrintRecyclerStats(size_t Size,
17762 size_t Align,
17863 size_t FreeListSize) {
140140 // will not.
141141 TEST(AllocatorTest, TestBigAlignment) {
142142 MockSlabAllocator SlabAlloc;
143 BumpPtrAllocator Alloc(4096, 4096, SlabAlloc);
143 BumpPtrAllocator Alloc(SlabAlloc);
144144 uintptr_t Ptr = (uintptr_t)Alloc.Allocate(3000, 2048);
145145 MemSlab *Slab = SlabAlloc.GetLastSlab();
146146 EXPECT_LE(Ptr + 3000, ((uintptr_t)Slab) + Slab->Size);