#ifndef __SYCL_DEVICE_ONLY__
#include <atomic>
#endif
#include <type_traits>

#define __SYCL_STATIC_ASSERT_NOT_FLOAT(T)                                     \
  static_assert(!std::is_same<T, float>::value,                               \
                "SYCL atomic function not available for float type")
// Forward declaration
template <typename pointerT, access::address_space addressSpace>
class multi_ptr;
// Traits restricting which value types and address spaces sycl::atomic
// accepts (referenced below as detail::IsValidAtomicType and
// detail::IsValidAtomicAddressSpace).
template <typename T> struct IsValidAtomicType {
  static constexpr bool value =
      (std::is_same<T, int>::value || std::is_same<T, unsigned int>::value ||
       std::is_same<T, long>::value || std::is_same<T, unsigned long>::value ||
       std::is_same<T, long long>::value ||
       std::is_same<T, unsigned long long>::value ||
       std::is_same<T, float>::value);
};

template <access::address_space AS> struct IsValidAtomicAddressSpace {
  static constexpr bool value =
      (AS == access::address_space::global_space ||
       AS == access::address_space::local_space ||
       AS == access::address_space::global_device_space);
};
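// For example, IsValidAtomicType<unsigned long long>::value and
// IsValidAtomicType<float>::value are true while
// IsValidAtomicType<double>::value is false; IsValidAtomicAddressSpace
// likewise rejects constant_space and private_space.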
#ifndef __SYCL_DEVICE_ONLY__
// Host implementation: SYCL memory orders map onto std::memory_order; the
// translation helper (signature elided) only ever yields relaxed ordering.
  return std::memory_order_relaxed;
// Host-side (std::atomic based) implementations of the atomic operations;
// the bodies of the simpler operations are elided here.
template <typename T>
// ...
template <typename T>
// ...
template <typename T>
// ...
template <typename T>
// ...
template <typename T>
// ...
template <typename T>
// ...
// The last two host operations use a compare-exchange retry loop: load the
// current value, try to install V, and re-read before retrying on failure.
template <typename T>
// ... (signature elided: takes a std::atomic<T> *Ptr, a value V, and a
// std::memory_order MemoryOrder)
  T Val = Ptr->load(MemoryOrder);
  // ... (retry loop header elided)
    if (Ptr->compare_exchange_strong(Val, V, MemoryOrder, MemoryOrder))
      break; // assumed from context: stop retrying once the exchange succeeds
    Val = Ptr->load(MemoryOrder);
  // ...

template <typename T>
// ... (signature elided, as above)
  T Val = Ptr->load(MemoryOrder);
  // ... (retry loop header elided)
    if (Ptr->compare_exchange_strong(Val, V, MemoryOrder, MemoryOrder))
      break; // as above
    Val = Ptr->load(MemoryOrder);
  // ...
#endif // !defined(__SYCL_DEVICE_ONLY__)
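// Illustrative sketch (not part of the original header): a self-contained
// version of the compare-exchange retry loop used above, here emulating an
// atomic fetch-max over std::atomic on the host. The function name and the
// max policy are assumptions for illustration only.
#ifndef __SYCL_DEVICE_ONLY__
template <typename T>
T example_atomic_fetch_max(std::atomic<T> *Ptr, T Operand,
                           std::memory_order MemoryOrder) {
  T Val = Ptr->load(MemoryOrder);
  while (true) {
    // The desired value is recomputed from the most recently observed Val.
    T Desired = (Operand > Val) ? Operand : Val;
    if (Ptr->compare_exchange_strong(Val, Desired, MemoryOrder, MemoryOrder))
      break; // the exchange landed; Val still holds the previous value
    Val = Ptr->load(MemoryOrder); // another thread won the race; retry
  }
  return Val; // like std::atomic fetch operations, returns the old value
}
#endif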
template <typename T, access::address_space addressSpace =
                          access::address_space::global_space>
class __SYCL2020_DEPRECATED(
    "sycl::atomic is deprecated since SYCL 2020") atomic {
  friend class atomic<T, access::address_space::global_space>;
  static_assert(detail::IsValidAtomicType<T>::value,
                "Invalid SYCL atomic type. Valid types are: int, "
                "unsigned int, long, unsigned long, long long, unsigned "
                "long long, float");
  static_assert(detail::IsValidAtomicAddressSpace<addressSpace>::value,
                "Invalid SYCL atomic address_space. Valid address spaces are: "
                "global_space, local_space, global_device_space");
  // SPIR-V memory scope used by the atomic operations (initializer elided).
  static constexpr auto SpirvScope =
      // ...
  template <typename pointerT>
#ifdef __SYCL_DEVICE_ONLY__
  atomic(multi_ptr<pointerT, addressSpace> ptr)
      // ... (device-side member initializer elided)
#else
  atomic(multi_ptr<pointerT, addressSpace> ptr)
      : Ptr(reinterpret_cast<std::atomic<T> *>(ptr.get()))
#endif
  {
    static_assert(sizeof(T) == sizeof(pointerT),
                  "T and pointerT must be same size");
  }
#ifdef __ENABLE_USM_ADDR_SPACE__
  // With USM address spaces enabled, a global_space atomic can be copy- or
  // move-constructed from a global_device_space atomic (constrained below).
  // ... (template head elided)
                _Space == addressSpace &&
                addressSpace == access::address_space::global_space>>
  atomic(const atomic<T, access::address_space::global_device_space> &RHS) {
    // ...
  }
  // ... (template head elided)
                _Space == addressSpace &&
                addressSpace == access::address_space::global_space>>
  atomic(atomic<T, access::address_space::global_device_space> &&RHS) {
    // ...
  }
#endif // __ENABLE_USM_ADDR_SPACE__
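  // Illustrative sketch (not part of the original header; assumes
  // __ENABLE_USM_ADDR_SPACE__ and a hypothetical multi_ptr DevPtr into a USM
  // device allocation):
  //   atomic<int, access::address_space::global_device_space> DevA(DevPtr);
  //   atomic<int> GenA(DevA); // uses the converting copy constructor above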
  void store(T Operand, memory_order Order = memory_order::relaxed) {
    // ...
  }
#ifdef __SYCL_DEVICE_ONLY__
  // On device, load() has two overloads selected on T: a general one, and a
  // cl_float one that loads the bits through an integer temporary and
  // bit_casts them back to cl_float.
  template <typename T2 = T>
  // ... (constrained return type elided)
  load(memory_order Order = memory_order::relaxed) const {
    // ...
  }
  template <typename T2 = T>
  // ... (constrained return type elided)
  load(memory_order Order = memory_order::relaxed) const {
    // ...
    cl_float ResVal = bit_cast<cl_float>(TmpVal);
    // ...
  }
#else
  // Host load() defers to std::atomic.
  T load(memory_order Order = memory_order::relaxed) const {
    // ...
  }
#endif
  T exchange(T Operand, memory_order Order = memory_order::relaxed) {
    // ...
  }
  bool compare_exchange_strong(T &Expected, T Desired,
                               // ... (memory order parameters elided)
#ifdef __SYCL_DEVICE_ONLY__
    T Value = __spirv_AtomicCompareExchange(
        // ... (the built-in returns the previously stored value)
    if (Value == Expected)
      // ... (device path: report success or update Expected; elided)
#else
    return Ptr->compare_exchange_strong(Expected, Desired,
                                        // ... (host path via std::atomic)
#endif
  // Each fetch operation below follows the same device/host split as
  // compare_exchange_strong above; bodies are elided here.
  T fetch_add(T Operand, memory_order Order = memory_order::relaxed) {
    // ...
  }
  T fetch_sub(T Operand, memory_order Order = memory_order::relaxed) {
    // ...
  }
  T fetch_and(T Operand, memory_order Order = memory_order::relaxed) {
    // ...
  }
  T fetch_or(T Operand, memory_order Order = memory_order::relaxed) {
    // ...
  }
  T fetch_xor(T Operand, memory_order Order = memory_order::relaxed) {
    // ...
  }
  T fetch_min(T Operand, memory_order Order = memory_order::relaxed) {
    // ...
  }
  T fetch_max(T Operand, memory_order Order = memory_order::relaxed) {
    // ...
  }
#ifdef __SYCL_DEVICE_ONLY__
  // ... (device-side Ptr member; on host Ptr is a std::atomic<T> *, as the
  // constructor above shows)
#endif
};
// Free-function forms of the SYCL atomic API; each forwards to the matching
// member function of atomic<T, addressSpace>. Default memory orders are
// relaxed, as in the member functions above.
template <typename T, access::address_space addressSpace>
void atomic_store(atomic<T, addressSpace> Object, T Operand,
                  memory_order MemoryOrder = memory_order::relaxed) {
  Object.store(Operand, MemoryOrder);
}

template <typename T, access::address_space addressSpace>
T atomic_load(atomic<T, addressSpace> Object,
              memory_order MemoryOrder = memory_order::relaxed) {
  return Object.load(MemoryOrder);
}

template <typename T, access::address_space addressSpace>
T atomic_exchange(atomic<T, addressSpace> Object, T Operand,
                  memory_order MemoryOrder = memory_order::relaxed) {
  return Object.exchange(Operand, MemoryOrder);
}

template <typename T, access::address_space addressSpace>
bool atomic_compare_exchange_strong(
    atomic<T, addressSpace> Object, T &Expected, T Desired,
    memory_order SuccessOrder = memory_order::relaxed,
    memory_order FailOrder = memory_order::relaxed) {
  return Object.compare_exchange_strong(Expected, Desired, SuccessOrder,
                                        FailOrder);
}

template <typename T, access::address_space addressSpace>
T atomic_fetch_add(atomic<T, addressSpace> Object, T Operand,
                   memory_order MemoryOrder = memory_order::relaxed) {
  return Object.fetch_add(Operand, MemoryOrder);
}

template <typename T, access::address_space addressSpace>
T atomic_fetch_sub(atomic<T, addressSpace> Object, T Operand,
                   memory_order MemoryOrder = memory_order::relaxed) {
  return Object.fetch_sub(Operand, MemoryOrder);
}

template <typename T, access::address_space addressSpace>
T atomic_fetch_and(atomic<T, addressSpace> Object, T Operand,
                   memory_order MemoryOrder = memory_order::relaxed) {
  return Object.fetch_and(Operand, MemoryOrder);
}

template <typename T, access::address_space addressSpace>
T atomic_fetch_or(atomic<T, addressSpace> Object, T Operand,
                  memory_order MemoryOrder = memory_order::relaxed) {
  return Object.fetch_or(Operand, MemoryOrder);
}

template <typename T, access::address_space addressSpace>
T atomic_fetch_xor(atomic<T, addressSpace> Object, T Operand,
                   memory_order MemoryOrder = memory_order::relaxed) {
  return Object.fetch_xor(Operand, MemoryOrder);
}

template <typename T, access::address_space addressSpace>
T atomic_fetch_min(atomic<T, addressSpace> Object, T Operand,
                   memory_order MemoryOrder = memory_order::relaxed) {
  return Object.fetch_min(Operand, MemoryOrder);
}

template <typename T, access::address_space addressSpace>
T atomic_fetch_max(atomic<T, addressSpace> Object, T Operand,
                   memory_order MemoryOrder = memory_order::relaxed) {
  return Object.fetch_max(Operand, MemoryOrder);
}
#undef __SYCL_STATIC_ASSERT_NOT_FLOAT
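// Illustrative usage sketch (not part of the header): sycl::atomic objects
// are typically obtained from an accessor declared with access::mode::atomic
// and then driven through the atomic_* free functions above. The buffer,
// queue, and kernel names here are hypothetical.
#include <CL/sycl.hpp>

int count_work_items() {
  namespace s = cl::sycl;
  int Count = 0;
  {
    s::buffer<int, 1> CountBuf(&Count, s::range<1>(1));
    s::queue Q;
    Q.submit([&](s::handler &CGH) {
      // access::mode::atomic makes operator[] yield atomic<int, global_space>.
      auto CountAcc = CountBuf.get_access<s::access::mode::atomic>(CGH);
      CGH.parallel_for<class CountKernel>(s::range<1>(1024), [=](s::id<1>) {
        s::atomic_fetch_add(CountAcc[0], 1); // forwards to atomic<int>::fetch_add
      });
    });
  } // buffer destruction copies the result back into Count
  return Count; // expected: 1024
}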