/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2020 Andrey Semashev
 */
/*!
 * \file atomic/detail/fence_arch_ops_gcc_sparc.hpp
 *
 * This header contains implementation of the \c fence_arch_operations struct.
 */

#ifndef BOOST_ATOMIC_DETAIL_FENCE_ARCH_OPS_GCC_SPARC_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_FENCE_ARCH_OPS_GCC_SPARC_HPP_INCLUDED_

#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/header.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {

//! Fence operations for SPARC
struct fence_arch_operations_gcc_sparc
{
    static BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_release:
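            // #StoreStore | #LoadStore: earlier stores and loads complete before later stores (release ordering)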
- __asm__ __volatile__ ("membar #StoreStore | #LoadStore" ::: "memory");
- break;
- case memory_order_consume:
- case memory_order_acquire:
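            // #LoadLoad | #LoadStore: earlier loads complete before later loads and stores (acquire ordering)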
- __asm__ __volatile__ ("membar #LoadLoad | #LoadStore" ::: "memory");
- break;
- case memory_order_acq_rel:
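            // Combined acquire/release: every ordering constraint except #StoreLoad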
- __asm__ __volatile__ ("membar #LoadLoad | #LoadStore | #StoreStore" ::: "memory");
- break;
- case memory_order_seq_cst:
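            // #Sync: full barrier, also orders earlier stores against later loads (#StoreLoad)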
- __asm__ __volatile__ ("membar #Sync" ::: "memory");
- break;
- case memory_order_relaxed:
- default:
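            // Relaxed fences impose no ordering, so no instruction is emitted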
            break;
        }
    }

    static BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
    {
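        // A fence that only synchronizes with a signal handler on the same thread
        // needs to restrain the compiler, not the CPU: an empty asm statement with
        // a "memory" clobber is enough, and no membar instruction is issued.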
        if (order != memory_order_relaxed)
            __asm__ __volatile__ ("" ::: "memory");
    }
};
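
// Usage sketch (illustrative; the exact dispatch path through Boost.Atomic may differ):
//   atomics::detail::fence_arch_operations::thread_fence(memory_order_seq_cst);
// would emit the "membar #Sync" instruction selected above.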
typedef fence_arch_operations_gcc_sparc fence_arch_operations;

} // namespace detail
} // namespace atomics
} // namespace boost

#include <boost/atomic/detail/footer.hpp>

#endif // BOOST_ATOMIC_DETAIL_FENCE_ARCH_OPS_GCC_SPARC_HPP_INCLUDED_