/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file   atomic/detail/ops_msvc_arm.hpp
 *
 * This header contains an implementation of the \c operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_

// NOTE: the original include targets are missing from this copy of the file; the list below is
// reconstructed from the identifiers this file uses and the stock Boost.Atomic layout, so the
// exact paths are assumed.
#include <intrin.h>
#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/detail/type_traits/make_signed.hpp>
#include <boost/atomic/capabilities.hpp>
#include <boost/atomic/detail/ops_msvc_common.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

#define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))

namespace lslboost {
namespace atomics {
namespace detail {

// A note about memory_order_consume. Technically, this architecture would allow us to avoid the
// memory barrier after a consume load, since it supports data dependency ordering. However, some
// compiler optimizations may break seemingly valid code that relies on data dependency tracking
// by injecting bogus branches to aid out-of-order execution. This may happen not only in
// Boost.Atomic code but also in the user's code, which we have no control over.
// See this thread: http://lists.boost.org/Archives/boost/2014/06/213890.php.
// For this reason we promote memory_order_consume to memory_order_acquire.
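
// The BOOST_ATOMIC_DETAIL_ARM_LOAD*/STORE* macros above wrap MSVC's __iso_volatile_* intrinsics,
// which perform plain volatile accesses without the implicit acquire/release barriers that the
// /volatile:ms semantics would otherwise attach to volatile reads and writes. The fence_* helpers
// below therefore emit the required DMB barriers explicitly.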
struct msvc_arm_operations_base
{
    static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = false;
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
    {
        __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
    }

    static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if (order == memory_order_seq_cst)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        // Combine order flags together and promote memory_order_consume to memory_order_acquire.
        // This relies on the memory_order constants being bit flags: masking out the consume bit
        // and OR-ing it back in shifted left by one turns consume into acquire.
        return static_cast< memory_order >(((static_cast< unsigned int >(failure_order) | static_cast< unsigned int >(success_order)) & ~static_cast< unsigned int >(memory_order_consume))
            | (((static_cast< unsigned int >(failure_order) | static_cast< unsigned int >(success_order)) & static_cast< unsigned int >(memory_order_consume)) << 1u));
    }
};

template< std::size_t Size, bool Signed, typename Derived >
struct msvc_arm_operations :
    public msvc_arm_operations_base
{
    typedef typename make_storage_type< Size >::type storage_type;
    typedef typename make_storage_type< Size >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = Size;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        typedef typename lslboost::atomics::detail::make_signed< storage_type >::type signed_storage_type;
        return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        Derived::store(storage, (storage_type)0, order);
    }
};
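
// The size-specific specializations below all follow the same scheme: plain loads and stores go
// through the __iso_volatile macros and are bracketed by the fences defined above, while the
// read-modify-write operations dispatch on the requested memory_order to a matching interlocked
// macro variant. The _RELAXED/_ACQUIRE/_RELEASE macro forms presumably map to the compiler's
// _nf/_acq/_rel interlocked intrinsics (see interlocked.hpp), with the unsuffixed form providing
// the fully fenced default used for acq_rel and seq_cst.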
template< bool Signed >
struct operations< 1u, Signed > :
    public msvc_arm_operations< 1u, Signed, operations< 1u, Signed > >
{
    typedef msvc_arm_operations< 1u, Signed, operations< 1u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }
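
    // Note on the compare-exchange convention used above (and in the other specializations): the
    // interlocked compare-exchange returns the value that was observed in storage. Success is
    // detected by comparing that observed value with the caller's original "expected" value, and
    // "expected" is overwritten with the observed value unconditionally; on success the two are
    // equal, so this is equivalent to updating "expected" only on failure.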
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 2u, Signed > :
    public msvc_arm_operations< 2u, Signed, operations< 2u, Signed > >
{
    typedef msvc_arm_operations< 2u, Signed, operations< 2u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
            break;
        }
        return v;
    }
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
            break;
        }
        return v;
    }
};
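
// A minimal usage sketch (illustrative only; the real caller is the Boost.Atomic front-end, which
// also takes care of placing the value in suitably aligned storage via aligned_storage_type):
//
//   typedef lslboost::atomics::detail::operations< 4u, false > ops;
//   ops::storage_type value = 0u;
//   ops::store(value, 1u, lslboost::memory_order_release);
//   ops::storage_type prev = ops::fetch_add(value, 5u, lslboost::memory_order_acq_rel); // prev == 1
//   ops::storage_type expected = 6u;
//   bool ok = ops::compare_exchange_strong(
//       value, expected, 0u, lslboost::memory_order_acq_rel, lslboost::memory_order_acquire); // ok == true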
template< bool Signed >
struct operations< 4u, Signed > :
    public msvc_arm_operations< 4u, Signed, operations< 4u, Signed > >
{
    typedef msvc_arm_operations< 4u, Signed, operations< 4u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
            break;
        }
        return v;
    }
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
            break;
        }
        return v;
    }
};
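
// The 64-bit specialization mirrors the narrower ones but uses the BOOST_ATOMIC_INTERLOCKED_*64
// macros. It is still treated as lock-free (is_always_lock_free is inherited as true from the
// base class); on 32-bit ARM the 64-bit interlocked operations are typically implemented by the
// compiler with LDREXD/STREXD exclusive-access loops rather than a lock.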
template< bool Signed >
struct operations< 8u, Signed > :
    public msvc_arm_operations< 8u, Signed, operations< 8u, Signed > >
{
    typedef msvc_arm_operations< 8u, Signed, operations< 8u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
            break;
        }
        return v;
    }
};

BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    if (order != memory_order_relaxed)
        msvc_arm_operations_base::hardware_full_fence();
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order != memory_order_relaxed)
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

} // namespace detail
} // namespace atomics
} // namespace lslboost
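
// The BOOST_ATOMIC_DETAIL_ARM_* macros are file-local helpers; they are undefined below so that
// they do not leak into translation units that include this header.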
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
#undef BOOST_ATOMIC_DETAIL_ARM_STORE8
#undef BOOST_ATOMIC_DETAIL_ARM_STORE16
#undef BOOST_ATOMIC_DETAIL_ARM_STORE32
#undef BOOST_ATOMIC_DETAIL_ARM_STORE64

#endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_