ATLAS Offline Software
ArenaSharedHeapSTLAllocator.icc
/*
  Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
*/
/**
 * @file AthAllocators/ArenaSharedHeapSTLAllocator.icc
 * @author scott snyder <snyder@bnl.gov>
 * @date Nov, 2011
 * @brief STL-style allocator wrapper for @c ArenaHeapAllocator allowing
 *        the heap to be shared between containers.
 */
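
/*
 * Illustrative usage sketch (the container and element types below are
 * hypothetical): copies of one allocator hand the same shared heap to
 * several containers, so their nodes all come from one set of pools.
 *
 *   SG::ArenaSharedHeapSTLAllocator<int> alloc (1000);
 *   std::list<int, SG::ArenaSharedHeapSTLAllocator<int> > l1 (alloc);
 *   std::list<int, SG::ArenaSharedHeapSTLAllocator<int> > l2 (alloc);
 *   // l1 and l2 allocate their nodes from the same shared heap.
 */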


#include "GaudiKernel/System.h"
#include "AthAllocators/exceptions.h"
#include <cassert>
#include <stdexcept>


namespace SG {


/**
 * @brief Call this when an allocator is being deleted.
 * @param a The address of the calling allocator.
 *
 * If the address matches the address we were given when we were created,
 * this object will be destroyed.
 */
inline
void ArenaSharedHeapSTLHeader::maybe_delete (const void* a)
{
  if (a == m_owner) {
    delete this;
  }
}


/**
 * @brief Return the name to use for an allocator for type @c T.
 */
template <class T>
std::string ArenaSharedHeapSTLHeader::get_name()
{
  return "ArenaSharedHeapSTLAllocator<" +
    System::typeinfoName (typeid (T)) + ">";
}


/**
 * @brief Return the heap allocator for type @c T.
 */
template <class T>
ArenaHeapAllocator* ArenaSharedHeapSTLHeader::get_pool()
{
  const static size_t index = get_index<T>();

  // Expand the list of allocators if needed.
  if (index >= m_allocators.size()) {
    m_allocators.resize (index+1);
  }

  // Create the allocator if we haven't done so yet.
  if (!m_allocators[index]) {
    m_allocators[index] = new ArenaHeapAllocator
      (ArenaHeapSTLAllocator_initParams<T> (m_nblock, get_name<T>()));
  }

  // Return the allocator.
  return m_allocators[index];
}


/**
 * @brief Update the owner of this object.
 * @param old_owner Object giving up ownership.
 * @param new_owner Object acquiring ownership.
 *
 * If the current owner is @c old_owner then change the owner to @c new_owner.
 */
inline
void ArenaSharedHeapSTLHeader::update_owner (const void* old_owner,
                                             const void* new_owner)
{
  if (m_owner == old_owner)
    m_owner = new_owner;
}


/**
 * @brief Return the allocator index to use for type @c T.
 */
template <class T>
size_t ArenaSharedHeapSTLHeader::get_index()
{
  // Note: We're only using the registry for the name<->index
  // mapping; we're not using it to construct the allocators
  // for us.  Thus, we pass in a null pointer for the creator.
  // (We don't construct the allocators from the registry because
  // we want to be able to change the number of blocks from instance
  // to instance, but there's no way to pass that through the Registry
  // interface.)

  std::string name = get_name<T>();
  ArenaAllocatorRegistry* reg = ArenaAllocatorRegistry::instance();
  size_t index = reg->lookup (name);
  if (index == std::string::npos) {
    index = reg->registerCreator (name, 0);
  }
  return index;
}


//===========================================================================


/**
 * @brief Default constructor.
 * @param nblock Value to set in the parameters structure for the
 *               number of elements to allocate per block.
 */
template <class T>
ArenaSharedHeapSTLAllocator<T>::ArenaSharedHeapSTLAllocator
  (size_t nblock /*= 1000*/)
  : m_header (nullptr),
    m_pool (nullptr)
{
  // Done in two steps like this to avoid maybe-uninitialized warnings
  // from gcc11.
  m_header = new ArenaSharedHeapSTLHeader (this, nblock);
  m_pool = m_header->get_pool<T>();
}


/**
 * @brief Copy constructor.
 *
 * The new STL allocator will reference the same set of underlying
 * Arena allocators as the old one.
 */
template <class T>
inline
ArenaSharedHeapSTLAllocator<T>::ArenaSharedHeapSTLAllocator
  (const ArenaSharedHeapSTLAllocator& a)
  : m_header (a.m_header),
    m_pool (a.m_pool)
{
}


/**
 * @brief Constructor from another @c ArenaSharedHeapSTLAllocator.
 *
 * The new STL allocator will reference the same set of underlying
 * Arena allocators as the old one.
 */
template <class T>
template <class U>
inline
ArenaSharedHeapSTLAllocator<T>::ArenaSharedHeapSTLAllocator
  (const ArenaSharedHeapSTLAllocator<U>& a)
  : m_header (a.m_header),
    m_pool (m_header->get_pool<T>())
{
}
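
/*
 * Illustrative sketch (hypothetical names): a node-based container such as
 * std::list rebinds its element allocator to its node type through this
 * constructor, so each instantiated type gets its own per-type pool while
 * all of them share the header created by the original allocator.
 *
 *   SG::ArenaSharedHeapSTLAllocator<int> ialloc;
 *   SG::ArenaSharedHeapSTLAllocator<double> dalloc (ialloc);
 *   // dalloc references the same ArenaSharedHeapSTLHeader as ialloc,
 *   // but allocates from a separate per-type ArenaHeapAllocator.
 */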


/**
 * @brief Destructor.
 */
template <class T>
inline
ArenaSharedHeapSTLAllocator<T>::~ArenaSharedHeapSTLAllocator()
{
  m_header->maybe_delete (this);
}


/**
 * @brief Assignment.
 *
 * We allow assignment only if the two objects involved represent
 * the same arena, in which case it's a no-op.
 * In other cases, we raise an exception.
 */
template <class T>
ArenaSharedHeapSTLAllocator<T>&
ArenaSharedHeapSTLAllocator<T>::operator=
  (const ArenaSharedHeapSTLAllocator& a)
{
  if (&a != this) {
    if (m_header != a.m_header) {
      throw SG::ExcDifferentArenas();
    }
    assert (m_pool == a.m_pool);
  }
  return *this;
}
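
/*
 * Illustrative sketch of the copy-assignment contract (hypothetical names):
 * assigning between allocators that share a header is a no-op, while
 * assigning between allocators from different arenas throws.
 *
 *   SG::ArenaSharedHeapSTLAllocator<int> a1;
 *   SG::ArenaSharedHeapSTLAllocator<int> a2 (a1);  // same header
 *   a1 = a2;                                       // ok, no-op
 *
 *   SG::ArenaSharedHeapSTLAllocator<int> a3;       // independent header
 *   try {
 *     a1 = a3;                                     // throws
 *   }
 *   catch (const SG::ExcDifferentArenas&) {
 *     // Handle containers that mix arenas.
 *   }
 */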


/**
 * @brief Move assignment.
 *
 * This allows assignment between different arenas.
 */
template <class T>
ArenaSharedHeapSTLAllocator<T>&
ArenaSharedHeapSTLAllocator<T>::operator= (ArenaSharedHeapSTLAllocator&& a)
{
  if (this != &a && m_header != a.m_header) {
    m_header = a.m_header;
    m_pool = a.m_pool;
  }
  return *this;
}


/**
 * @brief Swap.
 */
template <class T>
void ArenaSharedHeapSTLAllocator<T>::swap (ArenaSharedHeapSTLAllocator& a)
{
  if (m_header != a.m_header) {
    std::swap (m_header, a.m_header);
    m_header->update_owner (&a, this);
    a.m_header->update_owner (this, &a);
    std::swap (m_pool, a.m_pool);
  }
}
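
/*
 * Illustrative sketch (hypothetical names): swapping two allocators from
 * different arenas exchanges both the headers and the cached pools, and
 * transfers header ownership so each header is still deleted when its new
 * owning allocator is destroyed.
 *
 *   SG::ArenaSharedHeapSTLAllocator<int> x;
 *   SG::ArenaSharedHeapSTLAllocator<int> y;
 *   x.swap (y);   // x now uses y's original heap, and vice versa
 */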


/**
 * @brief Equality test.
 *
 * Two allocators should compare equal if objects allocated by one
 * can be deallocated by the other.  We check if they are referencing
 * the same Header.
 */
template <class T>
inline
bool ArenaSharedHeapSTLAllocator<T>::operator==
  (const ArenaSharedHeapSTLAllocator& other) const
{
  return m_header == other.m_header;
}


/**
 * @brief Inequality test.
 *
 * Two allocators should compare equal if objects allocated by one
 * can be deallocated by the other.  We check if they are referencing
 * the same Header.
 */
template <class T>
inline
bool ArenaSharedHeapSTLAllocator<T>::operator!=
  (const ArenaSharedHeapSTLAllocator& other) const
{
  return m_header != other.m_header;
}


/**
 * @brief Convert a reference to an address.
 */
template <class T>
inline
typename ArenaSharedHeapSTLAllocator<T>::pointer
ArenaSharedHeapSTLAllocator<T>::address (reference x) const
{
  return &x;
}


/**
 * @brief Allocate new objects.
 * @param n Number of objects to allocate.  Must be 1.
 * @param hint Allocation hint.  Not used.
 */
template <class T>
inline
typename ArenaSharedHeapSTLAllocator<T>::pointer
ArenaSharedHeapSTLAllocator<T>::allocate (size_type
#ifndef NDEBUG
                                          n
#endif
                                          , const void* /*hint = 0*/)
{
  assert (n == 1);
  return reinterpret_cast<pointer> (poolptr()->allocate());
}


/**
 * @brief Deallocate objects.
 * @param p Pointer to the object being freed.
 * @param n Number of objects to deallocate.  Must be 1.
 *
 * The element is returned to the underlying heap allocator's free list.
 */
template <class T>
inline
void ArenaSharedHeapSTLAllocator<T>::deallocate (pointer p, size_type
#ifndef NDEBUG
                                                 n
#endif
                                                 )
{
  assert (n == 1);
  using pointer_nc = std::remove_const_t<T>*;
  pointer_nc pp ATLAS_THREAD_SAFE = const_cast<pointer_nc>(p);
  poolptr()->free (reinterpret_cast<ArenaAllocatorBase::pointer> (pp));
}


/**
 * @brief Return the maximum number of objects we can allocate at once.
 *
 * This always returns 1.
 */
template <class T>
inline
typename ArenaSharedHeapSTLAllocator<T>::size_type
ArenaSharedHeapSTLAllocator<T>::max_size() const throw()
{
  return 1;
}


/**
 * @brief Call the @c T constructor.
 * @param p Location of the memory.
 * @param args Arguments to pass to the constructor.
 */
template <class T>
template <class... Args>
inline
void ArenaSharedHeapSTLAllocator<T>::construct (pointer p, Args&&... args)
{
  new (p) T(std::forward<Args>(args)...);
}


/**
 * @brief Call the @c T destructor.
 * @param p Location of the memory.
 */
template <class T>
inline
void ArenaSharedHeapSTLAllocator<T>::destroy (pointer p)
{
  p->~T();
}


/**
 * @brief Return the hinted number of objects allocated per block.
 */
template <class T>
inline
size_t ArenaSharedHeapSTLAllocator<T>::nblock() const
{
  return poolptr()->params().nblock;
}


/**
 * @brief Return the name of this allocator.
 */
template <class T>
inline
const std::string& ArenaSharedHeapSTLAllocator<T>::name() const
{
  return poolptr()->name();
}


/**
 * @brief Free all allocated elements.
 *
 * All elements allocated are returned to the free state.
 * @c clear should be called on them if it was provided.
 * The elements may continue to be cached internally, without
 * returning to the system.
 */
template <class T>
void ArenaSharedHeapSTLAllocator<T>::reset()
{
  poolptr()->reset();
}


/**
 * @brief Free all allocated elements and release memory back to the system.
 *
 * All elements allocated are freed, and all allocated blocks of memory
 * are released back to the system.
 * @c destructor should be called on them if it was provided
 * (preceded by @c clear if provided and @c mustClear was set).
 */
template <class T>
void ArenaSharedHeapSTLAllocator<T>::erase()
{
  poolptr()->erase();
}


/**
 * @brief Set the total number of elements cached by the allocator.
 * @param size The desired pool size.
 *
 * This allows changing the number of elements that are currently free
 * but cached.  Any allocated elements are not affected by this call.
 *
 * If @c size is greater than the total number of elements currently
 * cached, then more will be allocated.  This will preferably be done
 * with a single block, but that is not guaranteed; in addition, the
 * allocator may allocate more elements than requested.
 *
 * If @c size is smaller than the total number of elements currently
 * cached, as many blocks as possible will be released back to the system.
 * It may not be possible to release the number of elements requested;
 * this should be implemented on a best-effort basis.
 */
template <class T>
void ArenaSharedHeapSTLAllocator<T>::reserve (size_t size)
{
  poolptr()->reserve (size);
}
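
/*
 * Illustrative sketch (hypothetical names) of the memory-management calls
 * above: reset() returns every element to the free list while keeping the
 * blocks cached, erase() releases the blocks back to the system, and
 * reserve() adjusts how many free elements stay cached.  These must only
 * be called once no container is still using the allocated elements.
 *
 *   SG::ArenaSharedHeapSTLAllocator<int> alloc;
 *   // ... use containers built on alloc, then discard them ...
 *   alloc.reserve (10000);   // pre-size the cached free list
 *   alloc.reset();           // free all elements, keep the memory cached
 *   alloc.erase();           // release the cached blocks entirely
 */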


/**
 * @brief Return the statistics block for this allocator.
 */
template <class T>
ArenaAllocatorBase::Stats
ArenaSharedHeapSTLAllocator<T>::stats() const
{
  return poolptr()->stats();
}


/**
 * @brief Return the statistics blocks summed up over all allocators
 *        using this pool.
 */
template <class T>
inline
ArenaAllocatorBase::Stats ArenaSharedHeapSTLAllocator<T>::totstats() const
{
  return m_header->totstats();
}


/**
 * @brief Return a pointer to the underlying allocator.
 */
template <class T>
inline
ArenaHeapAllocator* ArenaSharedHeapSTLAllocator<T>::poolptr()
{
  return m_pool;
}


/**
 * @brief Return a pointer to the underlying allocator.
 */
template <class T>
inline
const ArenaHeapAllocator* ArenaSharedHeapSTLAllocator<T>::poolptr() const
{
  return m_pool;
}


/**
 * @brief Generate printable report for all contained allocators.
 * @param os Stream to which to write the report.
 */
template <class T>
void ArenaSharedHeapSTLAllocator<T>::report (std::ostream& os) const
{
  m_header->report(os);
}


/**
 * @brief Write-protect the memory managed by these allocators.
 *
 * Adjust protection on the memory managed by these allocators
 * to disallow writes.
 */
template <class T>
inline
void ArenaSharedHeapSTLAllocator<T>::protect()
{
  m_header->protect();
}


/**
 * @brief Write-enable the memory managed by these allocators.
 *
 * Adjust protection on the memory managed by these allocators
 * to allow writes.
 */
template <class T>
inline
void ArenaSharedHeapSTLAllocator<T>::unprotect()
{
  m_header->unprotect();
}


/**
 * @brief Hook for unprotecting an arena.
 *
 * Sometimes we need to ensure that an arena is unprotected before we start
 * destroying an object that contains the arena.  To do that without
 * making assumptions about whether the arena supports an unprotect
 * operation, call this function.
 */
template <class T>
void maybeUnprotect (ArenaSharedHeapSTLAllocator<T>& a)
{
  a.unprotect();
}
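
/*
 * Illustrative sketch (the helper and container type are hypothetical):
 * generic cleanup code can call maybeUnprotect before destroying a
 * container whose allocator may have been write-protected.  The copy
 * returned by get_allocator() shares the header, so unprotecting it
 * write-enables the shared heap.
 *
 *   template <class CONT>
 *   void destroyProtectedContainer (CONT& c)
 *   {
 *     auto alloc = c.get_allocator();
 *     maybeUnprotect (alloc);
 *     c.clear();
 *   }
 */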


} // namespace SG