00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021 #ifndef __TBB_atomic_H
00022 #define __TBB_atomic_H
00023
00024 #include "tbb_stddef.h"
00025 #include <cstddef>
00026
00027 #if _MSC_VER
00028 #define __TBB_LONG_LONG __int64
00029 #else
00030 #define __TBB_LONG_LONG long long
00031 #endif
00032
00033 #include "tbb_machine.h"
00034
00035 #if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
00036
00037 #pragma warning (push)
00038 #pragma warning (disable: 4244 4267)
00039 #endif
00040
00041 namespace tbb {
00042
//! Specifies memory semantics for the atomic operations.
enum memory_semantics {
    //! Sequentially consistent fence.
    full_fence,
    //! Acquire semantics (for loads / read-modify-write reads).
    acquire,
    //! Release semantics (for stores / read-modify-write writes).
    release,
    //! No ordering constraints.
    relaxed
};
00054
00056 namespace internal {
00057
// __TBB_DECL_ATOMIC_FIELD(t,f,a) declares a field f of type t aligned to a bytes,
// using whichever over-alignment syntax the compiler supports.
#if __TBB_ATTRIBUTE_ALIGNED_PRESENT
    #define __TBB_DECL_ATOMIC_FIELD(t,f,a) t f __attribute__ ((aligned(a)));
#elif __TBB_DECLSPEC_ALIGN_PRESENT
    #define __TBB_DECL_ATOMIC_FIELD(t,f,a) __declspec(align(a)) t f;
#else
    #error Do not know syntax for forcing alignment.
#endif
00065
//! Maps an operand size in bytes to the integral "word" type that the
//! machine-level atomic primitives operate on for that size.
template<size_t S>
struct atomic_rep;           // Primary template declared but not defined.

template<>
struct atomic_rep<1> {       // Specialization for 1-byte operands.
    typedef int8_t word;
};
template<>
struct atomic_rep<2> {       // Specialization for 2-byte operands.
    typedef int16_t word;
};
template<>
struct atomic_rep<4> {       // Specialization for 4-byte operands.
#if _MSC_VER && !_WIN64
    // NOTE(review): intptr_t (same size as int32_t on Win32) is used here
    // presumably to match the 32-bit MSVC Interlocked intrinsic signatures
    // and avoid overload ambiguity — confirm against the port's machine header.
    typedef intptr_t word;
#else
    typedef int32_t word;
#endif
};
#if __TBB_64BIT_ATOMICS
template<>
struct atomic_rep<8> {       // Specialization for 8-byte operands.
    typedef int64_t word;
};
#endif
00092
//! Holder for an atomic value, aligned to its own size so the machine
//! primitives can operate on it atomically.
template<typename value_type, size_t size>
struct aligned_storage;

// Generates the partial specialization of aligned_storage for size S,
// forcing S-byte alignment of the stored field.  The constexpr constructor
// (when __TBB_ATOMIC_CTORS) enables constant initialization of atomics.
#if __TBB_ATOMIC_CTORS
#define ATOMIC_STORAGE_PARTIAL_SPECIALIZATION(S)                      \
template<typename value_type>                                         \
struct aligned_storage<value_type,S> {                                \
    __TBB_DECL_ATOMIC_FIELD(value_type,my_value,S)                    \
    aligned_storage() = default ;                                     \
    constexpr aligned_storage(value_type value):my_value(value){}     \
};                                                                    \

#else
#define ATOMIC_STORAGE_PARTIAL_SPECIALIZATION(S)                      \
template<typename value_type>                                         \
struct aligned_storage<value_type,S> {                                \
    __TBB_DECL_ATOMIC_FIELD(value_type,my_value,S)                    \
};                                                                    \

#endif

// Single-byte values need no forced alignment beyond the natural one.
template<typename value_type>
struct aligned_storage<value_type,1> {
    value_type my_value;
#if __TBB_ATOMIC_CTORS
    aligned_storage() = default ;
    constexpr aligned_storage(value_type value):my_value(value){}
#endif
};

ATOMIC_STORAGE_PARTIAL_SPECIALIZATION(2)
ATOMIC_STORAGE_PARTIAL_SPECIALIZATION(4)
#if __TBB_64BIT_ATOMICS
ATOMIC_STORAGE_PARTIAL_SPECIALIZATION(8)
#endif
00129
//! Trait that maps (operand size, memory semantics) to the machine-level
//! read-modify-write primitives.
template<size_t Size, memory_semantics M>
struct atomic_traits;        // Primary template declared but not defined.

// Generates a full specialization of atomic_traits for size S and semantics M,
// forwarding to the fence-qualified machine primitives (__TBB_machine_<op><S><M>).
// Used when the port provides distinct primitives per memory semantics.
#define __TBB_DECL_FENCED_ATOMIC_PRIMITIVES(S,M)                                                           \
    template<> struct atomic_traits<S,M> {                                                                 \
        typedef atomic_rep<S>::word word;                                                                  \
        inline static word compare_and_swap( volatile void* location, word new_value, word comparand ) {   \
            return __TBB_machine_cmpswp##S##M(location,new_value,comparand);                               \
        }                                                                                                  \
        inline static word fetch_and_add( volatile void* location, word addend ) {                         \
            return __TBB_machine_fetchadd##S##M(location,addend);                                          \
        }                                                                                                  \
        inline static word fetch_and_store( volatile void* location, word value ) {                        \
            return __TBB_machine_fetchstore##S##M(location,value);                                         \
        }                                                                                                  \
    };

// Generates a partial specialization of atomic_traits for size S that maps
// every memory_semantics M to the same (full-fence) machine primitives.
// Used when the port provides only fully fenced primitives.
#define __TBB_DECL_ATOMIC_PRIMITIVES(S)                                                                    \
    template<memory_semantics M>                                                                           \
    struct atomic_traits<S,M> {                                                                            \
        typedef atomic_rep<S>::word word;                                                                  \
        inline static word compare_and_swap( volatile void* location, word new_value, word comparand ) {   \
            return __TBB_machine_cmpswp##S(location,new_value,comparand);                                  \
        }                                                                                                  \
        inline static word fetch_and_add( volatile void* location, word addend ) {                         \
            return __TBB_machine_fetchadd##S(location,addend);                                             \
        }                                                                                                  \
        inline static word fetch_and_store( volatile void* location, word value ) {                        \
            return __TBB_machine_fetchstore##S(location,value);                                            \
        }                                                                                                  \
    };
00161
//! Trait that maps a memory_semantics value to the machine load/store primitives.
template<memory_semantics M>
struct atomic_load_store_traits;    // Primary template declared but not defined.

// Generates the full specialization for semantics M, forwarding to the
// __TBB_load_<M>/__TBB_store_<M> machine-layer macros.
// Deliberately no trailing semicolon: invocations below supply it.
#define __TBB_DECL_ATOMIC_LOAD_STORE_PRIMITIVES(M)                  \
    template<> struct atomic_load_store_traits<M> {                 \
        template <typename T>                                       \
        inline static T load( const volatile T& location ) {        \
            return __TBB_load_##M( location );                      \
        }                                                           \
        template <typename T>                                       \
        inline static void store( volatile T& location, T value ) { \
            __TBB_store_##M( location, value );                     \
        }                                                           \
    }
00176
// Instantiate atomic_traits for every supported (size, semantics) combination.
#if __TBB_USE_FENCED_ATOMICS
// The port provides per-semantics machine primitives.
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(1,full_fence)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(2,full_fence)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(4,full_fence)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(1,acquire)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(2,acquire)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(4,acquire)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(1,release)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(2,release)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(4,release)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(1,relaxed)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(2,relaxed)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(4,relaxed)
#if __TBB_64BIT_ATOMICS
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(8,full_fence)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(8,acquire)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(8,release)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(8,relaxed)
#endif
#else
// The port provides only full-fence primitives: all semantics map to them.
__TBB_DECL_ATOMIC_PRIMITIVES(1)
__TBB_DECL_ATOMIC_PRIMITIVES(2)
__TBB_DECL_ATOMIC_PRIMITIVES(4)
#if __TBB_64BIT_ATOMICS
__TBB_DECL_ATOMIC_PRIMITIVES(8)
#endif
#endif

// Loads and stores for each memory semantics.
__TBB_DECL_ATOMIC_LOAD_STORE_PRIMITIVES(full_fence);
__TBB_DECL_ATOMIC_LOAD_STORE_PRIMITIVES(acquire);
__TBB_DECL_ATOMIC_LOAD_STORE_PRIMITIVES(release);
__TBB_DECL_ATOMIC_LOAD_STORE_PRIMITIVES(relaxed);
00209
00211
// Additive inverse of 1, computed via binary minus so the expression is
// well-formed (and warning-free) even when T is an unsigned type.
#define __TBB_MINUS_ONE(T) (T(T(0)-T(1)))
00214
00216
//! Base implementation shared by all atomic<T> specializations (no arithmetic).
/** Keeps the value in size-aligned storage and implements load/store,
    fetch_and_store and compare_and_swap by punning the value to the integral
    word of the same size and delegating to atomic_traits /
    atomic_load_store_traits. */
template<typename T>
struct atomic_impl {
protected:
    aligned_storage<T,sizeof(T)> my_storage;
private:
    //! Union that reinterprets a value_type as the same-sized integral word, and back.
    // Punning through a union rather than a cast; both members share storage.
    template<typename value_type>
    union converter {
        typedef typename atomic_rep<sizeof(value_type)>::word bits_type;
        converter(){}
        converter(value_type a_value) : value(a_value) {}
        value_type value;
        bits_type bits;
    };

    //! Defined only for pointer types, via the partial specialization below.
    template<typename value_t>
    union ptr_converter;

    //! Reinterprets a value_t* as a pointer to the integral word of sizeof(value_t).
    template<typename value_t>
    union ptr_converter<value_t *> {
        typedef typename atomic_rep<sizeof(value_t)>::word * bits_ptr_type;
        ptr_converter(){}
        ptr_converter(value_t* a_value) : value(a_value) {}
        value_t* value;
        bits_ptr_type bits;
    };

    //! Converts a value to its bit representation.
    template<typename value_t>
    static typename converter<value_t>::bits_type to_bits(value_t value){
        return converter<value_t>(value).bits;
    }
    //! Converts a bit pattern back to a value.
    template<typename value_t>
    static value_t to_value(typename converter<value_t>::bits_type bits){
        converter<value_t> u;
        u.bits = bits;
        return u.value;
    }

    //! Converts a pointer to the stored value into a pointer to its same-sized
    //! word, so the address can be handed to the machine load/store primitives.
    template<typename value_t>
    static typename ptr_converter<value_t*>::bits_ptr_type to_bits_ptr(value_t* value){
        // NOTE(review): the Sun compiler apparently cannot use the union-based
        // conversion here, hence the direct cast fallback — confirm against
        // that compiler's documentation.
#if !__SUNPRO_CC
        return ptr_converter<value_t*>(value).bits;
#else
        return typename ptr_converter<value_t*>::bits_ptr_type (value);
#endif
    }

public:
    typedef T value_type;

#if __TBB_ATOMIC_CTORS
    atomic_impl() = default ;
    constexpr atomic_impl(value_type value):my_storage(value){}
#endif
    //! Atomically stores value with memory semantics M; returns the previous value.
    template<memory_semantics M>
    value_type fetch_and_store( value_type value ) {
        return to_value<value_type>(internal::atomic_traits<sizeof(value_type),M>::fetch_and_store(&my_storage.my_value,to_bits(value)));
    }

    //! fetch_and_store with the default full_fence semantics.
    value_type fetch_and_store( value_type value ) {
        return fetch_and_store<full_fence>(value);
    }

    //! Atomically replaces the stored value with 'value' if it equals 'comparand';
    //! returns the value observed immediately before the operation.
    template<memory_semantics M>
    value_type compare_and_swap( value_type value, value_type comparand ) {
        return to_value<value_type>(internal::atomic_traits<sizeof(value_type),M>::compare_and_swap(&my_storage.my_value,to_bits(value),to_bits(comparand)));
    }

    //! compare_and_swap with the default full_fence semantics.
    value_type compare_and_swap( value_type value, value_type comparand ) {
        return compare_and_swap<full_fence>(value,comparand);
    }

    //! Implicit read of the value, with acquire semantics.
    operator value_type() const volatile {
        return to_value<value_type>(__TBB_load_with_acquire(*to_bits_ptr(&my_storage.my_value)));
    }

    //! Read with explicitly selected memory semantics M.
    template<memory_semantics M>
    value_type load () const {
        return to_value<value_type>(internal::atomic_load_store_traits<M>::load(*to_bits_ptr(&my_storage.my_value)));
    }

    //! Read with the default acquire semantics.
    value_type load () const {
        return load<acquire>();
    }

    //! Write with explicitly selected memory semantics M.
    template<memory_semantics M>
    void store ( value_type value ) {
        internal::atomic_load_store_traits<M>::store( *to_bits_ptr(&my_storage.my_value), to_bits(value));
    }

    //! Write with the default release semantics.
    void store ( value_type value ) {
        store<release>( value );
    }

protected:
    //! Store with release semantics; returns the stored value (used by operator=).
    value_type store_with_release( value_type rhs ) {
        __TBB_store_with_release(*to_bits_ptr(&my_storage.my_value),to_bits(rhs));
        return rhs;
    }
};
00326
00328
//! Adds the arithmetic operations to atomic_impl.
/** I is the stored value type, D the addend/difference type, and StepType the
    unit of the addend: fetch_and_add scales the addend by sizeof(StepType).
    Integral instantiations use StepType = char (scale 1); the pointer
    specialization of atomic uses StepType = T for pointer arithmetic. */
template<typename I, typename D, typename StepType>
struct atomic_impl_with_arithmetic: atomic_impl<I> {
public:
    typedef I value_type;
#if __TBB_ATOMIC_CTORS
    atomic_impl_with_arithmetic() = default ;
    constexpr atomic_impl_with_arithmetic(value_type value): atomic_impl<I>(value){}
#endif
    //! Atomically adds addend (scaled by sizeof(StepType)); returns the previous value.
    template<memory_semantics M>
    value_type fetch_and_add( D addend ) {
        return value_type(internal::atomic_traits<sizeof(value_type),M>::fetch_and_add( &this->my_storage.my_value, addend*sizeof(StepType) ));
    }

    //! fetch_and_add with the default full_fence semantics.
    value_type fetch_and_add( D addend ) {
        return fetch_and_add<full_fence>(addend);
    }

    //! Atomically increments; returns the value before the increment.
    template<memory_semantics M>
    value_type fetch_and_increment() {
        return fetch_and_add<M>(1);
    }

    //! fetch_and_increment with the default full_fence semantics.
    value_type fetch_and_increment() {
        return fetch_and_add(1);
    }

    //! Atomically decrements; returns the value before the decrement.
    template<memory_semantics M>
    value_type fetch_and_decrement() {
        return fetch_and_add<M>(__TBB_MINUS_ONE(D));
    }

    //! fetch_and_decrement with the default full_fence semantics.
    value_type fetch_and_decrement() {
        return fetch_and_add(__TBB_MINUS_ONE(D));
    }

public:
    //! Atomically adds value; returns the new value.
    value_type operator+=( D value ) {
        return fetch_and_add(value)+value;
    }

    //! Atomically subtracts value; returns the new value.
    value_type operator-=( D value ) {
        // Additive inverse of value computed with binary minus,
        // because D may be unsigned (unary minus would warn or misbehave).
        return operator+=(D(0)-value);
    }

    //! Pre-increment: returns the new value.
    value_type operator++() {
        return fetch_and_add(1)+1;
    }

    //! Pre-decrement: returns the new value.
    value_type operator--() {
        return fetch_and_add(__TBB_MINUS_ONE(D))-1;
    }

    //! Post-increment: returns the old value.
    value_type operator++(int) {
        return fetch_and_add(1);
    }

    //! Post-decrement: returns the old value.
    value_type operator--(int) {
        return fetch_and_add(__TBB_MINUS_ONE(D));
    }
};
00393
00394 }
00396
00398
//! Primary template for atomic.
/** Provides load/store/fetch_and_store/compare_and_swap but no arithmetic;
    integral and pointer types gain arithmetic through the specializations
    declared below. */
template<typename T>
struct atomic: internal::atomic_impl<T> {
#if __TBB_ATOMIC_CTORS
    atomic() = default;
    constexpr atomic(T arg): internal::atomic_impl<T>(arg) {}
#endif
    //! Assignment with release semantics; returns rhs.
    T operator=( T rhs ) {
        // "this->" is required: store_with_release is a member of a dependent base.
        return this->store_with_release(rhs);
    }
    atomic<T>& operator=( const atomic<T>& rhs ) {this->store_with_release(rhs); return *this;}
};
00412
// Generates the full specialization of atomic<T> for an integral type T,
// with arithmetic in steps of 1 (StepType = char) and release-store assignment.
#if __TBB_ATOMIC_CTORS
    #define __TBB_DECL_ATOMIC(T)                                                                    \
        template<> struct atomic<T>: internal::atomic_impl_with_arithmetic<T,T,char> {              \
            atomic() = default;                                                                     \
            constexpr atomic(T arg): internal::atomic_impl_with_arithmetic<T,T,char>(arg) {}        \
                                                                                                    \
            T operator=( T rhs ) {return store_with_release(rhs);}                                  \
            atomic<T>& operator=( const atomic<T>& rhs ) {store_with_release(rhs); return *this;}   \
        };
#else
    #define __TBB_DECL_ATOMIC(T)                                                                    \
        template<> struct atomic<T>: internal::atomic_impl_with_arithmetic<T,T,char> {              \
            T operator=( T rhs ) {return store_with_release(rhs);}                                  \
            atomic<T>& operator=( const atomic<T>& rhs ) {store_with_release(rhs); return *this;}   \
        };
#endif
00429
#if __TBB_64BIT_ATOMICS
// 64-bit integral atomics, only when the port supports 8-byte atomic operations.
__TBB_DECL_ATOMIC(__TBB_LONG_LONG)
__TBB_DECL_ATOMIC(unsigned __TBB_LONG_LONG)
#else
// NOTE(review): no 64-bit specializations on this path — presumably some
// emulation or restriction applies elsewhere; confirm against the port layer.
#endif
__TBB_DECL_ATOMIC(long)
__TBB_DECL_ATOMIC(unsigned long)

#if _MSC_VER && !_WIN64
#if __TBB_ATOMIC_CTORS
// Variant of __TBB_DECL_ATOMIC whose operator= also accepts a second type U.
// NOTE(review): on 32-bit MSVC, assigning size_t/ptrdiff_t to unsigned/int
// atomics would otherwise trigger conversion warnings; the U overload makes
// the narrowing T(rhs) explicit — confirm against MSVC warning C4267 docs.
#define __TBB_DECL_ATOMIC_ALT(T,U)                                                                  \
    template<> struct atomic<T>: internal::atomic_impl_with_arithmetic<T,T,char> {                  \
        atomic() = default ;                                                                        \
        constexpr atomic(T arg): internal::atomic_impl_with_arithmetic<T,T,char>(arg) {}            \
        T operator=( U rhs ) {return store_with_release(T(rhs));}                                   \
        atomic<T>& operator=( const atomic<T>& rhs ) {store_with_release(rhs); return *this;}       \
    };
#else
#define __TBB_DECL_ATOMIC_ALT(T,U)                                                                  \
    template<> struct atomic<T>: internal::atomic_impl_with_arithmetic<T,T,char> {                  \
        T operator=( U rhs ) {return store_with_release(T(rhs));}                                   \
        atomic<T>& operator=( const atomic<T>& rhs ) {store_with_release(rhs); return *this;}       \
    };
#endif
__TBB_DECL_ATOMIC_ALT(unsigned,size_t)
__TBB_DECL_ATOMIC_ALT(int,ptrdiff_t)
#else
__TBB_DECL_ATOMIC(unsigned)
__TBB_DECL_ATOMIC(int)
#endif

__TBB_DECL_ATOMIC(unsigned short)
__TBB_DECL_ATOMIC(short)
__TBB_DECL_ATOMIC(char)
__TBB_DECL_ATOMIC(signed char)
__TBB_DECL_ATOMIC(unsigned char)

// wchar_t is a distinct type on MSVC only when /Zc:wchar_t is in effect.
#if !_MSC_VER || defined(_NATIVE_WCHAR_T_DEFINED)
__TBB_DECL_ATOMIC(wchar_t)
#endif
00477
//! Specialization for pointers: arithmetic steps by sizeof(T) (StepType = T).
template<typename T> struct atomic<T*>: internal::atomic_impl_with_arithmetic<T*,ptrdiff_t,T> {
#if __TBB_ATOMIC_CTORS
    atomic() = default ;
    constexpr atomic(T* arg): internal::atomic_impl_with_arithmetic<T*,ptrdiff_t,T>(arg) {}
#endif
    //! Assignment with release semantics; returns rhs.
    T* operator=( T* rhs ) {
        // "this->" is required: store_with_release is a member of a dependent base.
        return this->store_with_release(rhs);
    }
    atomic<T*>& operator=( const atomic<T*>& rhs ) {
        this->store_with_release(rhs); return *this;
    }
    //! Member access through the atomically loaded pointer (acquire load).
    T* operator->() const {
        return (*this);
    }
};
00495
//! Specialization for void*: no arithmetic (pointer arithmetic on void* is ill-formed).
template<> struct atomic<void*>: internal::atomic_impl<void*> {
#if __TBB_ATOMIC_CTORS
    atomic() = default ;
    constexpr atomic(void* arg): internal::atomic_impl<void*>(arg) {}
#endif
    //! Assignment with release semantics; returns rhs.
    void* operator=( void* rhs ) {
        return this->store_with_release(rhs);
    }
    atomic<void*>& operator=( const atomic<void*>& rhs ) {
        this->store_with_release(rhs); return *this;
    }
};
00510
00511
00512
00513
//! Reads atomic a with the explicitly specified memory semantics M.
template <memory_semantics M, typename T>
T load ( const atomic<T>& a ) { return a.template load<M>(); }

//! Writes value into atomic a with the explicitly specified memory semantics M.
template <memory_semantics M, typename T>
void store ( atomic<T>& a, T value ) { return a.template store<M>(value); }
00519
00520 namespace interface6{
00522 template<typename T>
00523 atomic<T> make_atomic(T t) {
00524 atomic<T> a;
00525 store<relaxed>(a,t);
00526 return a;
00527 }
00528 }
00529 using interface6::make_atomic;
00530
00531 namespace internal {
00532
00533
00534 template<typename T>
00535 inline atomic<T>& as_atomic( T& t ) {
00536 return (atomic<T>&)t;
00537 }
00538 }
00539
00540 }
00541
00542 #if _MSC_VER && !__INTEL_COMPILER
00543 #pragma warning (pop)
00544 #endif // warnings 4244, 4267 are back
00545
00546 #endif