// boost/smart_ptr/detail/sp_counted_base_gcc_atomic.hpp
#ifndef BOOST_SMART_PTR_DETAIL_SP_COUNTED_BASE_GCC_ATOMIC_HPP_INCLUDED
#define BOOST_SMART_PTR_DETAIL_SP_COUNTED_BASE_GCC_ATOMIC_HPP_INCLUDED

// MS compatible compilers support #pragma once

#if defined(_MSC_VER) && (_MSC_VER >= 1020)
# pragma once
#endif

// detail/sp_counted_base_gcc_atomic.hpp - g++ 4.7+ __atomic intrinsics
//
// Copyright 2007, 2020 Peter Dimov
// Distributed under the Boost Software License, Version 1.0.
// https://www.boost.org/LICENSE_1_0.txt

#include <boost/smart_ptr/detail/sp_typeinfo_.hpp>
#include <boost/config.hpp>
#include <boost/cstdint.hpp>

#if defined(BOOST_SP_REPORT_IMPLEMENTATION)
#include <boost/config/pragma_message.hpp>
BOOST_PRAGMA_MESSAGE("Using __atomic sp_counted_base")
#endif

namespace boost
{

namespace detail
{
  23. inline void atomic_increment( boost::uint_least32_t * pw )
  24. {
  25. __atomic_fetch_add( pw, 1, __ATOMIC_RELAXED );
  26. }
  27. inline boost::uint_least32_t atomic_decrement( boost::uint_least32_t * pw )
  28. {
  29. return __atomic_fetch_sub( pw, 1, __ATOMIC_ACQ_REL );
  30. }
  31. inline boost::uint_least32_t atomic_conditional_increment( boost::uint_least32_t * pw )
  32. {
  33. // long r = *pw;
  34. // if( r != 0 ) ++*pw;
  35. // return r;
  36. boost::uint_least32_t r = __atomic_load_n( pw, __ATOMIC_RELAXED );
  37. for( ;; )
  38. {
  39. if( r == 0 )
  40. {
  41. return r;
  42. }
  43. if( __atomic_compare_exchange_n( pw, &r, r + 1, true, __ATOMIC_RELAXED, __ATOMIC_RELAXED ) )
  44. {
  45. return r;
  46. }
  47. }
  48. }
  49. inline boost::uint_least32_t atomic_load( boost::uint_least32_t const * pw )
  50. {
  51. return __atomic_load_n( pw, __ATOMIC_ACQUIRE );
  52. }
// Shared reference-count control block, implemented with GCC/Clang
// __atomic intrinsics (via the helpers above). Derived classes supply the
// deleter bookkeeping and the actual disposal of the managed object.
class BOOST_SYMBOL_VISIBLE sp_counted_base
{
private:

    // Non-copyable: declared private and left unimplemented (pre-C++11 idiom).
    sp_counted_base( sp_counted_base const & );
    sp_counted_base & operator= ( sp_counted_base const & );

    boost::uint_least32_t use_count_; // #shared
    boost::uint_least32_t weak_count_; // #weak + (#shared != 0)

public:

    // A fresh control block represents one shared owner; weak_count_ starts
    // at 1 to account for the "(#shared != 0)" contribution noted above.
    sp_counted_base(): use_count_( 1 ), weak_count_( 1 )
    {
    }

    virtual ~sp_counted_base() // nothrow
    {
    }

    // dispose() is called when use_count_ drops to zero, to release
    // the resources managed by *this.
    virtual void dispose() = 0; // nothrow

    // destroy() is called when weak_count_ drops to zero.
    virtual void destroy() // nothrow
    {
        delete this;
    }

    virtual void * get_deleter( sp_typeinfo_ const & ti ) = 0;
    virtual void * get_local_deleter( sp_typeinfo_ const & ti ) = 0;
    virtual void * get_untyped_deleter() = 0;

    // Unconditionally add a shared reference (copying a live shared_ptr).
    void add_ref_copy()
    {
        atomic_increment( &use_count_ );
    }

    // Try to add a shared reference when the count may already be zero
    // (weak_ptr::lock). Fails — returns false — if no shared owner remains.
    bool add_ref_lock() // true on success
    {
        return atomic_conditional_increment( &use_count_ ) != 0;
    }

    // Drop a shared reference. atomic_decrement returns the PRE-decrement
    // value, so == 1 means this call released the last shared owner: dispose
    // of the managed object, then drop the implicit weak reference that the
    // nonzero shared count was holding.
    void release() // nothrow
    {
        if( atomic_decrement( &use_count_ ) == 1 )
        {
            dispose();
            weak_release();
        }
    }

    void weak_add_ref() // nothrow
    {
        atomic_increment( &weak_count_ );
    }

    // Drop a weak reference; the last one (pre-decrement value == 1)
    // destroys the control block itself.
    void weak_release() // nothrow
    {
        if( atomic_decrement( &weak_count_ ) == 1 )
        {
            destroy();
        }
    }

    // Snapshot of the shared count (acquire load); may be stale by the time
    // the caller inspects it, as usual for use_count().
    long use_count() const // nothrow
    {
        return static_cast<long>( atomic_load( &use_count_ ) );
    }
};
} // namespace detail

} // namespace boost

#endif // #ifndef BOOST_SMART_PTR_DETAIL_SP_COUNTED_BASE_GCC_ATOMIC_HPP_INCLUDED