52 static constexpr bool const sync_after_init =
true;
53 using Shape = sycl::range<Dims>;
55 using pointer_type = value_type *;
56 static constexpr auto dims = Dims;
58 using ncT =
typename std::remove_const<value_type>::type;
59 using LocalData = sycl::local_accessor<ncT, Dims>;
61 CachedData(T *global_data, Shape shape, sycl::handler &cgh)
63 this->global_data = global_data;
64 local_data = LocalData(shape, cgh);
67 T *get_ptr()
const {
return &local_data[0]; }
70 void init(
const sycl::nd_item<_Dims> &item)
const
72 uint32_t llid = item.get_local_linear_id();
73 auto local_ptr = &local_data[0];
74 uint32_t size = local_data.size();
75 auto group = item.get_group();
76 uint32_t local_size = group.get_local_linear_range();
78 for (uint32_t i = llid; i < size; i += local_size) {
79 local_ptr[i] = global_data[i];
83 size_t size()
const {
return local_data.size(); }
85 T &operator[](
const sycl::id<Dims> &
id)
const {
return local_data[id]; }
87 template <
typename = std::enable_if_t<Dims == 1>>
88 T &operator[](
const size_t id)
const
90 return local_data[id];
95 value_type *global_data =
nullptr;
101 static constexpr bool const sync_after_init =
false;
102 using Shape = sycl::range<Dims>;
103 using value_type = T;
104 using pointer_type = value_type *;
105 static constexpr auto dims = Dims;
107 UncachedData(T *global_data,
const Shape &shape, sycl::handler &)
109 this->global_data = global_data;
113 T *get_ptr()
const {
return global_data; }
116 void init(
const sycl::nd_item<_Dims> &)
const
120 size_t size()
const {
return _shape.size(); }
122 T &operator[](
const sycl::id<Dims> &
id)
const {
return global_data[id]; }
124 template <
typename = std::enable_if_t<Dims == 1>>
125 T &operator[](
const size_t id)
const
127 return global_data[id];
131 T *global_data =
nullptr;
156 static constexpr bool const sync_after_init =
true;
157 static constexpr bool const sync_before_finalize =
true;
159 using LocalHist = sycl::local_accessor<localT, 2>;
// NOTE(review): the beginning of this constructor's signature is
// missing from this span, so only its tail is documented here. The
// visible code takes the number of local histogram copies, allocates a
// (copies_count x bins_count) SLM histogram on the handler, and binds
// the global output buffer. Presumably the dropped lines also carry
// `global_data`, `bins_count`, and `cgh` parameters — TODO confirm
// against the full file.
163 int32_t copies_count,
166 local_hist = LocalHist(sycl::range<2>(copies_count, bins_count), cgh);
167 global_hist = global_data;
171 void init(
const sycl::nd_item<_Dims> &item, localT val = 0)
const
173 uint32_t llid = item.get_local_linear_id();
174 auto *local_ptr = &local_hist[0][0];
175 uint32_t size = local_hist.size();
176 auto group = item.get_group();
177 uint32_t local_size = group.get_local_linear_range();
179 for (uint32_t i = llid; i < size; i += local_size) {
185 void add(
const sycl::nd_item<_Dims> &item, int32_t bin, localT value)
const
187 int32_t llid = item.get_local_linear_id();
188 int32_t local_hist_count = local_hist.get_range().get(0);
189 int32_t local_copy_id =
190 local_hist_count == 1 ? 0 : llid % local_hist_count;
192 AtomicOp<localT, sycl::memory_order::relaxed,
193 sycl::memory_scope::work_group>::add(local_hist[local_copy_id]
199 void finalize(
const sycl::nd_item<_Dims> &item)
const
201 uint32_t llid = item.get_local_linear_id();
202 uint32_t bins_count = local_hist.get_range().get(1);
203 uint32_t local_hist_count = local_hist.get_range().get(0);
204 auto group = item.get_group();
205 uint32_t local_size = group.get_local_linear_range();
207 for (uint32_t i = llid; i < bins_count; i += local_size) {
208 auto value = local_hist[0][i];
209 for (uint32_t lhc = 1; lhc < local_hist_count; ++lhc) {
210 value += local_hist[lhc][i];
213 AtomicOp<T, sycl::memory_order::relaxed,
214 sycl::memory_scope::device>::add(global_hist[i],
220 uint32_t size()
const {
return local_hist.size(); }
223 LocalHist local_hist;
224 T *global_hist =
nullptr;
// Histogram accumulated directly in global memory with device-scope
// atomics; no barriers are needed around init()/finalize().
static constexpr bool const sync_after_init = false;
static constexpr bool const sync_before_finalize = false;
236 void init(
const sycl::nd_item<_Dims> &)
const
241 void add(
const sycl::nd_item<_Dims> &, int32_t bin, T value)
const
243 AtomicOp<T, sycl::memory_order::relaxed,
244 sycl::memory_scope::device>::add(global_hist[bin], value);
248 void finalize(
const sycl::nd_item<_Dims> &)
const
253 T *global_hist =
nullptr;