/* Copyright (C) 2002-2025 CERN for the benefit of the ATLAS collaboration. */

#ifndef CALORECGPU_TEMPORARYHELPERS_H
#define CALORECGPU_TEMPORARYHELPERS_H

// Standard headers needed by the helpers below (std::launder, std::forward, offsetof).
// NMaxClusters, NumSamplings and CaloRecGPU::ClusterInfoArr are assumed to be made
// available by the CaloRecGPU data structure headers included before this one.
#include <new>
#include <utility>
#include <cstddef>

#define CALORECGPU_TEMP_CONCAT_HELPER_INNER(A, ...) A ## __VA_ARGS__
#define CALORECGPU_TEMP_CONCAT_HELPER(A, B) CALORECGPU_TEMP_CONCAT_HELPER_INNER(A, B)
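// This header generates accessor functions for temporary quantities needed while the
// GPU cluster moments are being calculated. It works in one of two ways:
//
//  * If CALORECGPU_TEMP_STRUCT_TO_USE is defined, the `moments` member of the
//    pointed-to cluster information is reinterpreted (through std::launder) as that
//    struct, and every temporary is simply one of its members.
//
//  * Otherwise, each temporary is overlaid on the storage of one or more existing
//    ClusterMomentsArr members: the helpers below compute suitably aligned byte
//    offsets into those arrays and launder pointers of the requested type into them.
//
// In both cases the generated accessors are wrapped in nv_diag_suppress / nv_diag_default
// 177 pragmas to silence nvcc's "declared but never referenced" warning for accessors
// that a given translation unit does not end up using.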
#ifdef CALORECGPU_TEMP_STRUCT_TO_USE

namespace CaloRecGPU
{
  template <class T, class PtrLike>
  __host__ __device__ T * get_pointer_to_temp_struct(PtrLike && p)
  {
    return std::launder(reinterpret_cast<T *>(&(p->moments)));
  }
}
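// The *_BASE_* macros below generate, for a temporary named TEMPNAME, const and
// non-const `TEMPNAME` / `TEMPNAME_ptr` accessors that index directly into the
// corresponding member of the temporary struct obtained above.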
#define CALORECGPU_TEMPARR_BASE_1D(TEMPNAME, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_pointer_to_temp_struct<const CALORECGPU_TEMP_STRUCT_TO_USE>(arr)->TEMPNAME + idx; \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_pointer_to_temp_struct<CALORECGPU_TEMP_STRUCT_TO_USE>(arr)->TEMPNAME + idx; \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon
#define CALORECGPU_TEMPARR_BASE_2D(TEMPNAME, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr, const unsigned int jdx, const unsigned int idx) \
  { \
    return &(CaloRecGPU::get_pointer_to_temp_struct<const CALORECGPU_TEMP_STRUCT_TO_USE>(arr)->TEMPNAME[jdx][idx]); \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr, const unsigned int jdx, const unsigned int idx) \
  { \
    return &(CaloRecGPU::get_pointer_to_temp_struct<CALORECGPU_TEMP_STRUCT_TO_USE>(arr)->TEMPNAME[jdx][idx]); \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr, const unsigned int jdx, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, jdx, idx); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr, const unsigned int jdx, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, jdx, idx); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon
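// With a dedicated temporary struct every temporary is a real member of
// CALORECGPU_TEMP_STRUCT_TO_USE, so the BASEVAR (and, for CALORECGPU_TEMPVAR, INDEX)
// arguments that identify which moments arrays would otherwise have their storage
// reused are ignored in this configuration: the helpers below just forward to the
// 1D/2D base macros above.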
#define CALORECGPU_TEMPARR_1(TEMPNAME, BASEVAR, TYPE) CALORECGPU_TEMPARR_BASE_1D(TEMPNAME, TYPE)

#define CALORECGPU_TEMPARR_2(TEMPNAME, BASEVAR1, BASEVAR2, TYPE) CALORECGPU_TEMPARR_BASE_1D(TEMPNAME, TYPE)

#define CALORECGPU_TEMP2DARR_1(TEMPNAME, BASEVAR, TYPE) CALORECGPU_TEMPARR_BASE_2D(TEMPNAME, TYPE)

#define CALORECGPU_TEMP2DARR_2(TEMPNAME, BASEVAR1, BASEVAR2, TYPE) CALORECGPU_TEMPARR_BASE_2D(TEMPNAME, TYPE)

#define CALORECGPU_TEMPBIGARR_1(TEMPNAME, BASEVAR, TYPE) CALORECGPU_TEMPARR_BASE_1D(TEMPNAME, TYPE)

#define CALORECGPU_TEMPBIGARR_2(TEMPNAME, BASEVAR1, BASEVAR2, TYPE) CALORECGPU_TEMPARR_BASE_1D(TEMPNAME, TYPE)

#define CALORECGPU_TEMPBIGARR_3(TEMPNAME, BASEVAR1, BASEVAR2, BASEVAR3, TYPE) CALORECGPU_TEMPARR_BASE_1D(TEMPNAME, TYPE)

#define CALORECGPU_TEMPCELLARR_1(TEMPNAME, BASEVAR1, BASEVAR2, BASEVAR3, TYPE) CALORECGPU_TEMPARR_BASE_1D(TEMPNAME, TYPE)

#define CALORECGPU_TEMPCELLARR_2(TEMPNAME, BASEVAR1, BASEVAR2, BASEVAR3, BASEVAR4, BASEVAR5, BASEVAR6, TYPE) CALORECGPU_TEMPARR_BASE_1D(TEMPNAME, TYPE)
#define CALORECGPU_TEMPVAR(TEMPNAME, BASEVAR, INDEX, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr) \
  { \
    return &CaloRecGPU::get_pointer_to_temp_struct<const CALORECGPU_TEMP_STRUCT_TO_USE>(arr)->TEMPNAME; \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr) \
  { \
    return &CaloRecGPU::get_pointer_to_temp_struct<CALORECGPU_TEMP_STRUCT_TO_USE>(arr)->TEMPNAME; \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon
#define CALORECGPU_TEMPWRAPPER(TEMPNAME, WRAPPED) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike, class ... Args> __host__ __device__ decltype(auto) TEMPNAME (PtrLike && p, Args && ... args) \
  { \
    return std::forward<PtrLike>(p)-> WRAPPED (std::forward<Args>(args)...); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon
#else

#include <type_traits>

namespace CaloRecGPU
{

  // General form, declared here for reference: get a pointer to the idx-th element of
  // type T placed over the storage of the ClusterMomentsArr member(s) passed in, whose
  // byte offsets inside the struct are given as the `us` template arguments. The two
  // overloads further below implement the single-member and multi-member cases.
  template <class T, unsigned int ... us, class ... PtrLikes>
  __host__ __device__ T * get_laundered_pointer(const unsigned int idx, PtrLikes && ... p);

  // Number of padding bytes needed so that an object with alignment `required`,
  // placed `base_align` bytes into otherwise suitably aligned storage, is aligned.
  inline constexpr unsigned int get_extra_alignment(const unsigned int base_align, const unsigned int required)
  {
    const unsigned int delta = base_align % required;

    return required * (delta != 0) - delta;
  }
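  // For instance, a member at byte offset 20 that will hold doubles gives
  // base_align == 20 % alignof(double) == 4, and get_extra_alignment(4, 8) == 4:
  // four padding bytes are needed before the first double.

  // check_sufficient_size is used by CALORECGPU_TEMPVAR further below. The signature is
  // part of this header's interface; the body given here is only an assumed sketch: it
  // checks that element `index` of type T, after padding `offset` up to T's alignment,
  // still fits inside one per-cluster moments array (taken here as NMaxClusters floats).
  template <class T>
  constexpr bool __host__ __device__ check_sufficient_size(const unsigned int offset, const unsigned int index)
  {
    const unsigned int padding = get_extra_alignment(offset % alignof(T), alignof(T));

    return padding + (index + 1) * sizeof(T) <= NMaxClusters * sizeof(float);
  }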
  // Single member: place the idx-th element of type T on top of the array `ptr`,
  // which lives at byte offset `offset` inside ClusterMomentsArr.
  template <class T, unsigned int offset, class PtrLike>
  __host__ __device__ T * get_laundered_pointer(const unsigned int idx, PtrLike && ptr)
  {
    using PtrType = std::decay_t<decltype(ptr[0])>;
    using BasePtrType = std::conditional_t<std::is_const_v<PtrType>, const char *, char *>;

    constexpr unsigned int base_offset = offset % alignof(T);
    constexpr unsigned int extra_alignment = get_extra_alignment(base_offset, alignof(T));

    BasePtrType base_ptr = reinterpret_cast<BasePtrType>(&ptr[0]);

    return std::launder(reinterpret_cast<T *>(base_ptr + extra_alignment + idx * sizeof(T)));
  }
  // Several members: the first array provides room for `real_size` elements of T,
  // and any index beyond that is delegated to the remaining arrays.
  template <class T, unsigned int offset, unsigned int ... us, class PtrLike, class ... PtrLikes>
  __host__ __device__ T * get_laundered_pointer(const unsigned int idx, PtrLike && ptr, PtrLikes && ... ptrs)
  {
    using PtrType = std::decay_t<decltype(ptr[0])>;

    constexpr unsigned int max_size = NMaxClusters * sizeof(PtrType);

    constexpr unsigned int base_offset = offset % alignof(T);
    constexpr unsigned int extra_alignment = get_extra_alignment(base_offset, alignof(T));

    constexpr unsigned int real_size = (max_size - extra_alignment) / sizeof(T);

    // Assumed recursion step: indices within the first member are handled by the
    // single-member overload above; the rest spill over into the remaining members.
    if (idx < real_size)
      {
        return get_laundered_pointer<T, offset>(idx, std::forward<PtrLike>(ptr));
      }
    else
      {
        return get_laundered_pointer<T, us...>(idx - real_size, std::forward<PtrLikes>(ptrs)...);
      }
  }
  // Temporaries indexed by (jdx, idx) — e.g. (sampling, cluster) — overlaid on the rows
  // of one or more two-dimensional ClusterMomentsArr members.
  template <class T, class ... PtrLikes>
  __host__ __device__ T * get_laundered_pointer_striped(const unsigned int jdx, const unsigned int idx, PtrLikes && ... ptrs)
  {
    static_assert(alignof(T) <= alignof(double), "We don't support aligning in this case...");

    // Assumed placement (sketch): for a given jdx, the elements are "striped" across
    // row jdx of each of the passed arrays in turn, each row holding num_per_array
    // of them.
    using PtrType = std::common_type_t<std::decay_t<decltype(ptrs[0][0])>...>;
    using BasePtrType = std::conditional_t<std::is_const_v<PtrType>, const char *, char *>;

    constexpr unsigned int num_per_array = (NMaxClusters * sizeof(PtrType)) / sizeof(T);

    BasePtrType base_ptrs[] = { reinterpret_cast<BasePtrType>(&ptrs[jdx][0]) ... };

    return std::launder(reinterpret_cast<T *>(base_ptrs[idx / num_per_array] + (idx % num_per_array) * sizeof(T)));
  }

  // General declaration of the "stacked" placement used by the larger temporaries;
  // the two overloads below implement it for one and for several members.
  template <class T, class ... PtrLikes>
  __host__ __device__ T * get_laundered_pointer_stacked(const unsigned int idx, PtrLikes && ... ptrs);
  // One two-dimensional member: interpret it as a flat pool of elements of T,
  // filling it row by row.
  template <class T, class PtrLike>
  __host__ __device__ T * get_laundered_pointer_stacked(const unsigned int idx, PtrLike && ptr)
  {
    static_assert(alignof(T) <= alignof(double), "We don't support aligning in this case...");

    using PtrType = std::decay_t<decltype(ptr[0][0])>;
    using BasePtrType = std::conditional_t<std::is_const_v<PtrType>, const char *, char *>;

    constexpr unsigned int num_per_array = (NMaxClusters * sizeof(PtrType)) / sizeof(T);

    const unsigned int first_idx = idx / num_per_array;

    const unsigned int second_idx = idx % num_per_array;

    // Assumed final step: place the element within row `first_idx`.
    BasePtrType base_ptr = reinterpret_cast<BasePtrType>(&ptr[first_idx][0]);

    return std::launder(reinterpret_cast<T *>(base_ptr + second_idx * sizeof(T)));
  }

  // Several two-dimensional members: the first provides room for `total_num` elements
  // of T, and indices beyond that spill over to the remaining members.
  template <class T, class PtrLike, class ... PtrLikes>
  __host__ __device__ T * get_laundered_pointer_stacked(const unsigned int idx, PtrLike && ptr, PtrLikes && ... ptrs)
  {
    static_assert(alignof(T) <= alignof(double));

    using PtrType = std::decay_t<decltype(ptr[0][0])>;

    constexpr unsigned int num_per_array = (NMaxClusters * sizeof(PtrType)) / sizeof(T);
    constexpr unsigned int total_num = num_per_array * NumSamplings;

    // Assumed recursion step, mirroring get_laundered_pointer above.
    if (idx < total_num)
      {
        return get_laundered_pointer_stacked<T>(idx, std::forward<PtrLike>(ptr));
      }
    else
      {
        return get_laundered_pointer_stacked<T>(idx - total_num, std::forward<PtrLikes>(ptrs)...);
      }
  }

} // namespace CaloRecGPU
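// The macros below generate, for each temporary TEMPNAME, const and non-const
// `TEMPNAME` / `TEMPNAME_ptr` accessors that locate the temporary inside the reused
// ClusterMomentsArr member(s) named by the BASEVAR arguments:
//  - CALORECGPU_TEMPARR_*     per-cluster arrays, placed via get_laundered_pointer;
//  - CALORECGPU_TEMP2DARR_*   (jdx, idx)-indexed arrays, placed via get_laundered_pointer_striped;
//  - CALORECGPU_TEMPBIGARR_*  larger flat arrays, placed via get_laundered_pointer_stacked;
//  - CALORECGPU_TEMPCELLARR_* flat arrays spread over three or six members,
//    placed via get_laundered_pointer;
//  - CALORECGPU_TEMPVAR       a single value stored at a fixed INDEX.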
#define CALORECGPU_TEMPARR_1(TEMPNAME, BASEVAR, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer<const TYPE, offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR)>(idx, arr->moments. BASEVAR); \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer<TYPE, offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR)>(idx, arr->moments. BASEVAR); \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon

#define CALORECGPU_TEMPARR_2(TEMPNAME, BASEVAR1, BASEVAR2, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer<const TYPE, \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR1), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR2)> \
           (idx, arr->moments. BASEVAR1, arr->moments. BASEVAR2); \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer<TYPE, \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR1), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR2)> \
           (idx, arr->moments. BASEVAR1, arr->moments. BASEVAR2); \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon
#define CALORECGPU_TEMP2DARR_1(TEMPNAME, BASEVAR, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr, const unsigned int jdx, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer_striped<const TYPE>(jdx, idx, arr->moments. BASEVAR); \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr, const unsigned int jdx, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer_striped<TYPE>(jdx, idx, arr->moments. BASEVAR); \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr, const unsigned int jdx, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, jdx, idx); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr, const unsigned int jdx, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, jdx, idx); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon

#define CALORECGPU_TEMP2DARR_2(TEMPNAME, BASEVAR1, BASEVAR2, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr, const unsigned int jdx, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer_striped<const TYPE>(jdx, idx, arr->moments. BASEVAR1, arr->moments. BASEVAR2); \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr, const unsigned int jdx, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer_striped<TYPE>(jdx, idx, arr->moments. BASEVAR1, arr->moments. BASEVAR2); \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr, const unsigned int jdx, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, jdx, idx); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr, const unsigned int jdx, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, jdx, idx); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon
#define CALORECGPU_TEMPBIGARR_1(TEMPNAME, BASEVAR, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer_stacked<const TYPE>(idx, arr->moments. BASEVAR); \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer_stacked<TYPE>(idx, arr->moments. BASEVAR); \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon

#define CALORECGPU_TEMPBIGARR_2(TEMPNAME, BASEVAR1, BASEVAR2, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer_stacked<const TYPE>(idx, arr->moments. BASEVAR1, arr->moments. BASEVAR2); \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer_stacked<TYPE>(idx, arr->moments. BASEVAR1, arr->moments. BASEVAR2); \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon

#define CALORECGPU_TEMPBIGARR_3(TEMPNAME, BASEVAR1, BASEVAR2, BASEVAR3, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer_stacked<const TYPE>(idx, arr->moments. BASEVAR1, arr->moments. BASEVAR2, arr->moments. BASEVAR3); \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer_stacked<TYPE>(idx, arr->moments. BASEVAR1, arr->moments. BASEVAR2, arr->moments. BASEVAR3); \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon
#define CALORECGPU_TEMPCELLARR_1(TEMPNAME, BASEVAR1, BASEVAR2, BASEVAR3, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer<const TYPE, \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR1), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR2), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR3)> \
           (idx, arr->moments. BASEVAR1, arr->moments. BASEVAR2, arr->moments. BASEVAR3); \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer<TYPE, \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR1), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR2), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR3)> \
           (idx, arr->moments. BASEVAR1, arr->moments. BASEVAR2, arr->moments. BASEVAR3); \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon

#define CALORECGPU_TEMPCELLARR_2(TEMPNAME, BASEVAR1, BASEVAR2, BASEVAR3, BASEVAR4, BASEVAR5, BASEVAR6, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer<const TYPE, \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR1), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR2), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR3), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR4), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR5), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR6)> \
           (idx, arr->moments. BASEVAR1, arr->moments. BASEVAR2, arr->moments. BASEVAR3, \
            arr->moments. BASEVAR4, arr->moments. BASEVAR5, arr->moments. BASEVAR6); \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr, const unsigned int idx) \
  { \
    return CaloRecGPU::get_laundered_pointer<TYPE, \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR1), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR2), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR3), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR4), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR5), \
                                             offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR6)> \
           (idx, arr->moments. BASEVAR1, arr->moments. BASEVAR2, arr->moments. BASEVAR3, \
            arr->moments. BASEVAR4, arr->moments. BASEVAR5, arr->moments. BASEVAR6); \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr, const unsigned int idx) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr, idx); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon
#define CALORECGPU_TEMPVAR(TEMPNAME, BASEVAR, INDEX, TYPE) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike> __host__ __device__ const TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (const PtrLike & arr) \
  { \
    static_assert(CaloRecGPU::check_sufficient_size<TYPE>(offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR), INDEX)); \
    return CaloRecGPU::get_laundered_pointer<const TYPE, offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR)>(INDEX, arr->moments. BASEVAR); \
  } \
  template <class PtrLike> __host__ __device__ TYPE * CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr) (PtrLike & arr) \
  { \
    static_assert(CaloRecGPU::check_sufficient_size<TYPE>(offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR), INDEX)); \
    return CaloRecGPU::get_laundered_pointer<TYPE, offsetof(CaloRecGPU::ClusterInfoArr::ClusterMomentsArr, BASEVAR)>(INDEX, arr->moments. BASEVAR); \
  } \
  template <class PtrLike> __host__ __device__ const TYPE & TEMPNAME (const PtrLike & arr) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr); \
  } \
  template <class PtrLike> __host__ __device__ TYPE & TEMPNAME (PtrLike & arr) \
  { \
    return *CALORECGPU_TEMP_CONCAT_HELPER(TEMPNAME, _ptr)(arr); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon
#define CALORECGPU_TEMPWRAPPER(TEMPNAME, WRAPPED) _Pragma("nv_diag_suppress 177") \
  template <class PtrLike, class ... Args> __host__ __device__ decltype(auto) TEMPNAME (PtrLike && p, Args && ... args) \
  { \
    return std::forward<PtrLike>(p)-> WRAPPED (std::forward<Args>(args)...); \
  } _Pragma("nv_diag_default 177") struct to_end_with_semicolon
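// Illustrative usage sketch (hypothetical names, not defined by this header): a moments
// calculation could declare a per-cluster float temporary that reuses the storage of an
// existing moments array and then access it by cluster index from host or device code:
//
//   CALORECGPU_TEMPARR_1(temp_abs_energy, engPerSample, float);
//
//   // ... inside a kernel or host loop:
//   // temp_abs_energy(clusters_ptr, cluster) += fabsf(cell_energy);
//
// Here `engPerSample` stands for whichever ClusterMomentsArr member is safe to reuse at
// that point of the calculation, and `clusters_ptr` for a pointer(-like object) to the
// ClusterInfoArr being processed.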
#endif // CALORECGPU_TEMP_STRUCT_TO_USE

#endif // CALORECGPU_TEMPORARYHELPERS_H