#ifdef __SYCL_DEVICE_ONLY__
#ifndef __SYCL_DEVICE_ONLY__
#include <type_traits>
namespace ext::oneapi {
using namespace ::sycl::detail;
static constexpr bool value =
    (std::is_same<T, int>::value || std::is_same<T, unsigned int>::value ||
     std::is_same<T, long>::value || std::is_same<T, unsigned long>::value ||
     std::is_same<T, long long>::value ||
     std::is_same<T, unsigned long long>::value ||
     std::is_same<T, float>::value || std::is_same<T, double>::value ||
     std::is_pointer<T>::value);
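// Illustrative checks (not part of the original header; assumes the
// enclosing trait is named IsValidAtomicType): the trait admits exactly
// the types listed above and rejects everything else.
static_assert(IsValidAtomicType<unsigned long long>::value, "");
static_assert(IsValidAtomicType<double>::value, "");
static_assert(IsValidAtomicType<int *>::value, "");
static_assert(!IsValidAtomicType<short>::value,
              "short is not a valid atomic type");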
template <sycl::access::address_space AS>
using IsValidAtomicAddressSpace =
    bool_constant<AS == access::address_space::global_space ||
                  AS == access::address_space::local_space ||
                  AS == access::address_space::ext_intel_global_device_space>;
template <memory_order Order>
using IsValidDefaultOrder = bool_constant<Order == memory_order::relaxed ||
                                          Order == memory_order::acq_rel ||
                                          Order == memory_order::seq_cst>;
case memory_order::__consume_unsupported: // 'consume' ordering is not supported
template <typename T, typename = void> struct bit_equal;
bool operator()(const T &lhs, const T &rhs) { return lhs == rhs; }
// float specialization: compare 32-bit representations, so two values are
// equal exactly when their bit patterns match (including NaNs).
auto LhsInt = sycl::bit_cast<uint32_t>(lhs);
auto RhsInt = sycl::bit_cast<uint32_t>(rhs);
return LhsInt == RhsInt;
// double specialization: same idea with 64-bit representations.
auto LhsInt = sycl::bit_cast<uint64_t>(lhs);
auto RhsInt = sycl::bit_cast<uint64_t>(rhs);
return LhsInt == RhsInt;
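// Standalone illustration (not part of this header): why bit_equal compares
// representations instead of using operator==. A CAS retry loop keyed on
// operator== could spin forever once the location holds a NaN, because
// NaN == NaN is false even for identical bit patterns. Assumes host C++20
// for std::bit_cast.
#include <bit>
#include <cstdint>
#include <limits>

inline bool nan_bit_equal_demo() {
  float nan = std::numeric_limits<float>::quiet_NaN();
  bool by_value = (nan == nan);                      // false: == rejects NaN
  bool by_bits = std::bit_cast<std::uint32_t>(nan) ==
                 std::bit_cast<std::uint32_t>(nan);  // true: same bits
  return by_bits && !by_value;                       // true
}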
119 "Invalid atomic type. Valid types are int, unsigned int, long, "
120 "unsigned long, long long, unsigned long long, float, double "
121 "and pointer types");
123 "Invalid atomic address_space. Valid address spaces are: "
124 "global_space, local_space, ext_intel_global_device_space");
127 "Invalid default memory_order for atomics. Valid defaults are: "
128 "relaxed, acq_rel, seq_cst");
static constexpr size_t required_alignment = sizeof(T);
// All valid atomic types are lock-free in this implementation.
static constexpr bool is_always_lock_free = detail::IsValidAtomicType<T>::value;
static constexpr memory_order default_read_modify_write_order = DefaultOrder;
#ifdef __SYCL_DEVICE_ONLY__
: ptr(reinterpret_cast<std::atomic<T> *>(&ref)) {}
#ifdef __SYCL_DEVICE_ONLY__
detail::spirv::AtomicStore(ptr, scope, order, operand);
#ifdef __SYCL_DEVICE_ONLY__
return detail::spirv::AtomicLoad(ptr, scope, order);
operator T() const noexcept { return load(); }
#ifdef __SYCL_DEVICE_ONLY__
return detail::spirv::AtomicExchange(ptr, scope, order, operand);
#ifdef __SYCL_DEVICE_ONLY__
T value = detail::spirv::AtomicCompareExchange(ptr, scope, success, failure,
                                               desired, expected);
// Host path:
return ptr->compare_exchange_strong(expected, desired,
                                    detail::getStdMemoryOrder(success),
                                    detail::getStdMemoryOrder(failure));
return compare_exchange_strong(expected, desired, order, order, scope);
#ifdef __SYCL_DEVICE_ONLY__
// Device code reuses the strong CAS for compare_exchange_weak.
return compare_exchange_strong(expected, desired, success, failure, scope);
// Host path:
return ptr->compare_exchange_weak(expected, desired,
                                  detail::getStdMemoryOrder(success),
                                  detail::getStdMemoryOrder(failure));
return compare_exchange_weak(expected, desired, order, order, scope);
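// Usage sketch (hypothetical, not part of this header): the canonical CAS
// retry loop written against the interface above. 'ref' is any
// atomic_ref-like object; on failure, compare_exchange_weak refreshes
// 'expected' with the value currently in memory, so 'desired' is always
// recomputed from fresh data. Here the stored value is atomically doubled.
template <typename AtomicRef>
auto atomic_double_sketch(const AtomicRef &ref) {
  auto expected = ref.load();
  auto desired = expected;
  do {
    desired = expected * 2;
  } while (!ref.compare_exchange_weak(expected, desired));
  return desired; // the value this thread successfully installed
}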
#ifdef __SYCL_DEVICE_ONLY__
: public atomic_ref_base<T, DefaultOrder, DefaultScope, AddressSpace> {
// Partial specialization for integral types:
typename detail::enable_if_t<std::is_integral<T>::value>>
: public atomic_ref_base<T, DefaultOrder, DefaultScope, AddressSpace> {
static constexpr size_t required_alignment = sizeof(T);
static constexpr bool is_always_lock_free = detail::IsValidAtomicType<T>::value;
static constexpr memory_order default_read_modify_write_order = DefaultOrder;
using atomic_ref_base<T, DefaultOrder, DefaultScope,
                      AddressSpace>::compare_exchange_weak;
#ifdef __SYCL_DEVICE_ONLY__
return detail::spirv::AtomicIAdd(ptr, scope, order, operand);
return fetch_add(operand) + operand; // operator+= yields the new value
return fetch_add(1) + 1;             // pre-increment
#ifdef __SYCL_DEVICE_ONLY__
return detail::spirv::AtomicISub(ptr, scope, order, operand);
return fetch_sub(operand) - operand; // operator-= yields the new value
return fetch_sub(1) - 1;             // pre-decrement
#ifdef __SYCL_DEVICE_ONLY__
return detail::spirv::AtomicAnd(ptr, scope, order, operand);
return fetch_and(operand) & operand; // operator&= yields the new value
#ifdef __SYCL_DEVICE_ONLY__
return detail::spirv::AtomicOr(ptr, scope, order, operand);
T operator|=(T operand) const noexcept { return fetch_or(operand) | operand; }
#ifdef __SYCL_DEVICE_ONLY__
return detail::spirv::AtomicXor(ptr, scope, order, operand);
return fetch_xor(operand) ^ operand; // operator^= yields the new value
#ifdef __SYCL_DEVICE_ONLY__
return detail::spirv::AtomicMin(ptr, scope, order, operand);
// Fallback: CAS loop; the 'operand < old' test exits early once the
// stored value is already the minimum.
T old = load(load_order, scope);
while (operand < old &&
       !compare_exchange_weak(old, operand, order, scope)) {
#ifdef __SYCL_DEVICE_ONLY__
return detail::spirv::AtomicMax(ptr, scope, order, operand);
T old = load(load_order, scope);
while (operand > old &&
       !compare_exchange_weak(old, operand, order, scope)) {
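// Host-only sketch of the fallback pattern above, written against
// std::atomic so it compiles standalone (hypothetical helper, not part of
// this header). The 'operand < old' test skips the CAS entirely once the
// stored value is already the minimum; on CAS failure, 'old' is refreshed
// with the current value and the condition is re-tested.
#include <atomic>

template <typename T>
T fetch_min_sketch(std::atomic<T> &a, T operand) {
  T old = a.load(std::memory_order_acquire);
  while (operand < old &&
         !a.compare_exchange_weak(old, operand, std::memory_order_acq_rel)) {
    // 'old' now holds the value another thread installed; loop re-tests it.
  }
  return old; // previous value, matching fetch_* conventions
}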
// Partial specialization for floating-point types:
T, DefaultOrder, DefaultScope, AddressSpace,
typename detail::enable_if_t<std::is_floating_point<T>::value>>
: public atomic_ref_base<T, DefaultOrder, DefaultScope, AddressSpace> {
static constexpr size_t required_alignment = sizeof(T);
static constexpr bool is_always_lock_free = detail::IsValidAtomicType<T>::value;
static constexpr memory_order default_read_modify_write_order = DefaultOrder;
using atomic_ref_base<T, DefaultOrder, DefaultScope,
                      AddressSpace>::compare_exchange_weak;
#if defined(__SYCL_DEVICE_ONLY__) && defined(SYCL_USE_NATIVE_FP_ATOMICS)
return detail::spirv::AtomicFAdd(ptr, scope, order, operand);
// Fallback when native floating-point atomics are unavailable:
T expected = load(load_order, scope);
desired = expected + operand;
} while (!compare_exchange_weak(expected, desired, order, scope));
return fetch_add(operand) + operand; // operator+= yields the new value
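// Host-only sketch of the same compare_exchange fallback, against
// std::atomic<double> (hypothetical helper, not part of this header).
// Before C++20 added fetch_add for floating-point std::atomic, this loop
// was the standard emulation:
#include <atomic>

inline double fp_fetch_add_sketch(std::atomic<double> &a, double operand) {
  double expected = a.load(std::memory_order_acquire);
  double desired;
  do {
    desired = expected + operand; // recompute from the refreshed 'expected'
  } while (!a.compare_exchange_weak(expected, desired,
                                    std::memory_order_acq_rel));
  return expected; // value observed immediately before this thread's add
}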
#if defined(__SYCL_DEVICE_ONLY__) && defined(SYCL_USE_NATIVE_FP_ATOMICS)
// Subtraction is expressed as addition of the negated operand:
return detail::spirv::AtomicFAdd(ptr, scope, order, -operand);
T expected = load(load_order, scope);
desired = expected - operand;
} while (!compare_exchange_weak(expected, desired, order, scope));
return fetch_sub(operand) - operand; // operator-= yields the new value
#if defined(__SYCL_DEVICE_ONLY__) && defined(SYCL_USE_NATIVE_FP_ATOMICS)
return detail::spirv::AtomicMin(ptr, scope, order, operand);
T old = load(load_order, scope);
while (operand < old &&
       !compare_exchange_weak(old, operand, order, scope)) {
#if defined(__SYCL_DEVICE_ONLY__) && defined(SYCL_USE_NATIVE_FP_ATOMICS)
return detail::spirv::AtomicMax(ptr, scope, order, operand);
T old = load(load_order, scope);
while (operand > old &&
       !compare_exchange_weak(old, operand, order, scope)) {
static constexpr size_t required_alignment = sizeof(T *);
// Pointers are always among the valid lock-free types.
static constexpr bool is_always_lock_free = detail::IsValidAtomicType<T *>::value;
static constexpr memory_order default_read_modify_write_order = DefaultOrder;
using base_type::is_lock_free;
base_type::store(reinterpret_cast<uintptr_t>(operand), order, scope);
return reinterpret_cast<T *>(base_type::load(order, scope));
operator T *() const noexcept { return load(); }
return reinterpret_cast<T *>(
    base_type::exchange(reinterpret_cast<uintptr_t>(operand), order, scope));
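// Host-only sketch of the pointer round-trip used above (hypothetical
// helper, not part of this header): pointer operations delegate to an
// integer (uintptr_t) base and convert at the boundary, so one integer
// implementation serves every pointee type.
#include <atomic>
#include <cstdint>

template <typename T>
T *exchange_ptr_sketch(std::atomic<std::uintptr_t> &a, T *operand) {
  return reinterpret_cast<T *>(
      a.exchange(reinterpret_cast<std::uintptr_t>(operand)));
}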
T *expected = load(load_order, scope);
desired = expected + operand;
} while (!compare_exchange_weak(expected, desired, order, scope));
return fetch_add(operand) + operand;
T *expected = load(load_order, scope);
desired = expected - operand;
} while (!compare_exchange_weak(expected, desired, order, scope));
return fetch_sub(operand) - operand;
return base_type::compare_exchange_strong(
    reinterpret_cast<uintptr_t &>(expected),
    reinterpret_cast<uintptr_t>(desired), success, failure, scope);
return compare_exchange_strong(expected, desired, order, order, scope);
return base_type::compare_exchange_weak(
    reinterpret_cast<uintptr_t &>(expected),
    reinterpret_cast<uintptr_t>(desired), success, failure, scope);
return compare_exchange_weak(expected, desired, order, order, scope);
using base_type::ptr;
// The public atomic_ref type inherits constructors and assignment from the
// implementation class:
using detail::atomic_ref_impl<T, DefaultOrder, DefaultScope,
                              AddressSpace>::atomic_ref_impl;
using detail::atomic_ref_impl<T, DefaultOrder, DefaultScope,
                              AddressSpace>::operator=;
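// End-to-end usage sketch (hypothetical, not part of this header): a
// relaxed device-scope counter incremented once per work-item. Template
// arguments follow the <T, DefaultOrder, DefaultScope, AddressSpace>
// order declared above; exact namespaces for memory_order/memory_scope
// may differ across SYCL versions.
#include <sycl/sycl.hpp>

int main() {
  sycl::queue q;
  int *counter = sycl::malloc_shared<int>(1, q);
  *counter = 0;
  q.parallel_for(sycl::range<1>{1024}, [=](sycl::id<1>) {
     sycl::ext::oneapi::atomic_ref<int, sycl::memory_order::relaxed,
                                   sycl::memory_scope::device,
                                   sycl::access::address_space::global_space>
         ref(*counter);
     ref.fetch_add(1); // one relaxed increment per work-item
   }).wait();
  // *counter == 1024 here
  sycl::free(counter, q);
}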