ztomic.h

#ifndef ZTOMIC_H_
#define ZTOMIC_H_

#include <stdint.h>

#if defined( _MSC_VER )
#include <intrin.h>  // declares the _Interlocked* intrinsics used below
#endif

namespace atomic_impl
{

typedef enum memory_order
{
    memory_order_relaxed,
    memory_order_consume,
    memory_order_acquire,
    memory_order_release,
    memory_order_acq_rel,
    memory_order_seq_cst
} memory_order;
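// These enumerators are declared in the same order as GCC's __ATOMIC_RELAXED
// .. __ATOMIC_SEQ_CST constants (values 0 through 5), which is why the
// __GNUC__ branch below can pass them straight to the __atomic_* builtins.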
#if defined( _MSC_VER )

namespace detail
{

// Dispatch on sizeof(T): one specialization per supported operand width.
template<int param> struct msvc_fetch_add { };

template<> struct msvc_fetch_add<4>
{
    template<typename T, typename V>
    static T call(T* storage, V v)
    {
        // _InterlockedExchangeAdd returns the value held *before* the
        // addition, matching fetch_add semantics. (The _InterlockedAdd
        // family returns the result instead, and is ARM-only.)
        return _InterlockedExchangeAdd((volatile long*)storage, (long)v);
    }
};

template<> struct msvc_fetch_add<8>
{
    template<typename T, typename V>
    static T call(T* storage, V v)
    {
        return _InterlockedExchangeAdd64((volatile __int64*)storage, (__int64)v);
    }
};

template<int param> struct msvc_cas { };

template<> struct msvc_cas<4>
{
    template<typename T, typename V>
    static bool call(T* storage, T* exp, V v)
    {
        // _InterlockedCompareExchange returns the previous value of *storage.
        long rc = _InterlockedCompareExchange((volatile long*)storage, (long)v, (long)*exp);
        if ((T)rc == *exp)
            return true;
        *exp = (T)rc;  // on failure, report the observed value to the caller
        return false;
    }
};

template<> struct msvc_cas<8>
{
    template<typename T, typename V>
    static bool call(T* storage, T* exp, V v)
    {
        __int64 rc = _InterlockedCompareExchange64((volatile __int64*)storage, (__int64)v, (__int64)*exp);
        if ((T)rc == *exp)
            return true;
        *exp = (T)rc;
        return false;
    }
};

} // namespace detail

template<typename T, typename V>
static T fetch_add(T* storage, V v)
{
    return atomic_impl::detail::msvc_fetch_add<sizeof(T)>::call(storage, v);
}

template<typename T, typename V>
static bool cas(T* storage, T* exp, V v)
{
    return atomic_impl::detail::msvc_cas<sizeof(T)>::call(storage, exp, v);
}
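// Only fetch_add and cas are ported for MSVC; the store/load wrappers in
// ztomic below are implemented only in the __GNUC__ branch, and being
// templates they stay harmless here unless instantiated. A 32-bit store
// could be sketched (untested assumption) with the _InterlockedExchange
// intrinsic:
//     _InterlockedExchange((volatile long*)storage, (long)v);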
#elif defined( __IBMCPP__ ) && defined( __powerpc )

// IBM XL C/C++ builtins for PowerPC.
extern "builtin" void __lwsync(void);
extern "builtin" void __isync(void);
extern "builtin" int __fetch_and_add(volatile int32_t* addr, int val);
extern "builtin" int64_t __fetch_and_addlp(volatile int64_t* addr, int64_t val);

// RAII fence pair: lwsync on construction, isync on destruction, so a
// local_sync on the stack brackets the atomic operation in its scope.
struct local_sync
{
    local_sync()
    {
        __lwsync();
    }
    ~local_sync()
    {
        __isync();
    }
};
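// On PowerPC this is the classic fence pairing: the lwsync issued before the
// operation orders earlier accesses (release side), and the isync issued
// after it keeps later accesses from starting early (acquire side), giving
// the fetch-and-add roughly acq_rel ordering.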
template<int param> struct xlc_fetch_add { };

template<> struct xlc_fetch_add<4>
{
    template<typename T, typename V>
    static T call(T* storage, V v)
    {
        local_sync _1;
        return __fetch_and_add(storage, v);
    }
};

template<> struct xlc_fetch_add<8>
{
    template<typename T, typename V>
    static T call(T* storage, V v)
    {
        local_sync _1;
        return __fetch_and_addlp(storage, v);
    }
};

template<typename T, typename V>
static T fetch_add(T* storage, V v)
{
    return atomic_impl::xlc_fetch_add<sizeof(T)>::call(storage, v);
}
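// As with MSVC, this port is partial: only fetch_add is provided, so
// ztomic::cas, ztomic::store and ztomic::load will not compile for XLC if
// instantiated.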
#elif defined( __GNUC__ )

template<typename T, typename V>
static void store(T* storage, V v)
{
    // memory_order_release shares its value with __ATOMIC_RELEASE (see the
    // note under the memory_order enum above).
    __atomic_store_n(storage, v, memory_order_release);
}

template<typename T>
static T load(T* storage)
{
    return __atomic_load_n(storage, memory_order_acquire);
}

template<typename T, typename V>
static T fetch_add(T* storage, V v)
{
    // Legacy __sync builtin: full barrier, returns the value before the add.
    return __sync_fetch_and_add(storage, v);
}

template<typename T, typename V>
static bool cas(T* storage, T* exp, V v)
{
    T ov = __sync_val_compare_and_swap(storage, *exp, v);
    if (ov == *exp)
        return true;
    *exp = ov;  // on failure, hand the observed value back to the caller
    return false;
}
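// The __sync_* builtins above are the older, full-barrier interface; the
// same cas could also be written (assumption: GCC 4.7 or newer) as
//     __atomic_compare_exchange_n(storage, exp, v, false,
//                                 memory_order_seq_cst, memory_order_seq_cst);
// which likewise writes the observed value into *exp on failure and would
// allow weaker orderings per call.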
#else
#error No ztomic operations implemented for this platform, sorry!
#endif

// Map each supported integer type to the signed type the underlying
// intrinsics operate on; ztomic::cast routes operands through this map.
template<typename T> struct type_map {};
template<> struct type_map<uint32_t> { typedef int32_t type; };
template<> struct type_map<int32_t>  { typedef int32_t type; };
template<> struct type_map<uint64_t> { typedef int64_t type; };
template<> struct type_map<int64_t>  { typedef int64_t type; };

} // namespace atomic_impl
struct ztomic
{
    template<typename V>
    static typename atomic_impl::type_map<V>::type cast(V v)
    {
        return static_cast<typename atomic_impl::type_map<V>::type>(v);
    }

    // Returns the value *storage held before the addition.
    template<typename T, typename V>
    static T fetch_add(T* storage, V v)
    {
        return atomic_impl::fetch_add(storage, cast(v));
    }

    // An unsigned v wraps under negation; cast() maps the wrapped value back
    // to the intended negative delta via two's complement.
    template<typename T, typename V>
    static T fetch_sub(T* storage, V v)
    {
        return ztomic::fetch_add(storage, -v);
    }

    // Returns the value *storage holds after the addition.
    template<typename T, typename V>
    static T add_fetch(T* storage, V v)
    {
        return ztomic::fetch_add(storage, v) + cast(v);
    }

    template<typename T, typename V>
    static T sub_fetch(T* storage, V v)
    {
        return ztomic::fetch_add(storage, -v) - cast(v);
    }

    template<typename T, typename V>
    static void store(T* storage, V v)
    {
        atomic_impl::store(storage, v);
    }

    template<typename T>
    static T load(T* storage)
    {
        return atomic_impl::load(storage);
    }

    // On success returns true; on failure returns false and updates *exp
    // with the value currently in *storage.
    template<typename T, typename V>
    static bool cas(T* storage, T* exp, V v)
    {
        return atomic_impl::cas(storage, exp, v);
    }
};
#endif // ZTOMIC_H_
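
// Usage sketch (illustrative only; `counter` and the loop below are examples,
// not part of the header):
//
//     uint64_t counter = 0;
//
//     uint64_t before = ztomic::fetch_add(&counter, uint64_t(1)); // value prior to the add
//     uint64_t after  = ztomic::add_fetch(&counter, uint64_t(1)); // value after the add
//
//     uint64_t expected = ztomic::load(&counter);
//     while (!ztomic::cas(&counter, &expected, expected * 2))
//         ;  // each failed cas refreshes `expected` with the current value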