tbb_machine.h

/*
    Copyright 2005-2009 Intel Corporation.  All Rights Reserved.

    The source code contained or described herein and all documents related
    to the source code ("Material") are owned by Intel Corporation or its
    suppliers or licensors.  Title to the Material remains with Intel
    Corporation or its suppliers and licensors.  The Material is protected
    by worldwide copyright laws and treaty provisions.  No part of the
    Material may be used, copied, reproduced, modified, published, uploaded,
    posted, transmitted, distributed, or disclosed in any way without
    Intel's prior express written permission.

    No license under any patent, copyright, trade secret or other
    intellectual property right is granted to or conferred upon you by
    disclosure or delivery of the Materials, either expressly, by
    implication, inducement, estoppel or otherwise.  Any license under such
    intellectual property rights must be express and approved by Intel in
    writing.
*/

#ifndef __TBB_machine_H
#define __TBB_machine_H

#include "tbb_stddef.h"

#if _WIN32||_WIN64

#ifdef _MANAGED
#pragma managed(push, off)
#endif

#if defined(_M_IX86)
#include "machine/windows_ia32.h"
#elif defined(_M_AMD64)
#include "machine/windows_em64t.h"
#else
#error Unsupported platform
#endif

#ifdef _MANAGED
#pragma managed(pop)
#endif

#elif __linux__ || __FreeBSD__

#if __i386__
#include "machine/linux_ia32.h"
#elif __x86_64__
#include "machine/linux_em64t.h"
#elif __ia64__
#include "machine/linux_itanium.h"
#endif

#elif __APPLE__

#if __i386__
#include "machine/linux_ia32.h"
#elif __x86_64__
#include "machine/linux_em64t.h"
#elif __POWERPC__
#include "machine/mac_ppc.h"
#endif

#elif _AIX

#include "machine/ibm_aix51.h"

#elif __sun || __SUNPRO_CC

#define __asm__ asm
#define __volatile__ volatile
#if __i386  || __i386__
#include "machine/linux_ia32.h"
#elif __x86_64__
#include "machine/linux_em64t.h"
#endif

#endif

#if !defined(__TBB_CompareAndSwap4) || !defined(__TBB_CompareAndSwap8) || !defined(__TBB_Yield)
#error Minimal requirements for tbb_machine.h not satisfied
#endif

#ifndef __TBB_load_with_acquire
    //! Load with acquire semantics; no following memory operation can move above the load.
    template<typename T>
    inline T __TBB_load_with_acquire(const volatile T& location) {
        T temp = location;
#ifdef __TBB_fence_for_acquire
        __TBB_fence_for_acquire();
#endif /* __TBB_fence_for_acquire */
        return temp;
    }
#endif

#ifndef __TBB_store_with_release
    //! Store with release semantics; no prior memory operation can move below the store.
    template<typename T, typename V>
    inline void __TBB_store_with_release(volatile T& location, V value) {
#ifdef __TBB_fence_for_release
        __TBB_fence_for_release();
#endif /* __TBB_fence_for_release */
        location = T(value);
    }
#endif
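
/* Illustrative sketch (not part of the original header): the two templates
   above pair up to publish data between threads.  payload and ready are
   hypothetical variables.

       static int payload;
       static volatile int ready;   // initially 0

       void producer() {
           payload = 42;
           __TBB_store_with_release(ready, 1);   // payload is visible before ready
       }

       void consumer() {
           while( !__TBB_load_with_acquire(ready) )
               __TBB_Yield();
           // here payload is guaranteed to be 42
       }
*/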

#ifndef __TBB_Pause
    //! Fallback for platforms without a machine-specific pause: yield to the scheduler instead.
    inline void __TBB_Pause(int32_t) {
        __TBB_Yield();
    }
#endif

namespace tbb {
namespace internal {

//! Class that implements exponential backoff.
class AtomicBackoff {
    //! Time delay, in units of "pause" instructions.
    /** Should be roughly the number of "pause" instructions that take the
        same time as a context switch. */
    static const int32_t LOOPS_BEFORE_YIELD = 16;
    int32_t count;
public:
    AtomicBackoff() : count(1) {}

    //! Pause for a while.
    void pause() {
        if( count<=LOOPS_BEFORE_YIELD ) {
            __TBB_Pause(count);
            // Pause twice as long the next time.
            count*=2;
        } else {
            // Pause is so long that we might as well yield CPU to scheduler.
            __TBB_Yield();
        }
    }

    //! Pause as in pause(), but return false instead of yielding once the backoff bound is reached.
    bool bounded_pause() {
        if( count<=LOOPS_BEFORE_YIELD ) {
            __TBB_Pause(count);
            // Pause twice as long the next time.
            count*=2;
            return true;
        } else {
            return false;
        }
    }

    //! Reset the backoff to its initial delay.
    void reset() {
        count = 1;
    }
};
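
/* Usage sketch: a spin-wait loop built on AtomicBackoff, similar in spirit
   to TBB's own spin_wait_while_eq (the function name here is illustrative):

       void spin_wait_while_eq( const volatile int& location, int value ) {
           AtomicBackoff backoff;
           while( location==value )
               backoff.pause();
       }
*/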

// T should be unsigned, otherwise sign propagation will break correctness of bit manipulations.
// S should be either 1 or 2, for the mask calculation to work correctly.
// Together, these rules limit applicability of Masked CAS to unsigned char and unsigned short.
template<size_t S, typename T>
inline T __TBB_MaskedCompareAndSwap (volatile T *ptr, T value, T comparand ) {
    volatile uint32_t * base = (uint32_t*)( (uintptr_t)ptr & ~(uintptr_t)0x3 );
#if __TBB_BIG_ENDIAN
    const uint8_t bitoffset = uint8_t( 8*( 4-S - (uintptr_t(ptr) & 0x3) ) );
#else
    const uint8_t bitoffset = uint8_t( 8*((uintptr_t)ptr & 0x3) );
#endif
    const uint32_t mask = ( (1<<(S*8)) - 1 )<<bitoffset;
    AtomicBackoff b;
    uint32_t result;
    for(;;) {
        result = *base; // reload the base value which might change during the pause
        uint32_t old_value = ( result & ~mask ) | ( comparand << bitoffset );
        uint32_t new_value = ( result & ~mask ) | ( value << bitoffset );
        // __TBB_CompareAndSwap4 presumed to have full fence.
        result = __TBB_CompareAndSwap4( base, new_value, old_value );
        if(  result==old_value               // CAS succeeded
          || ((result^old_value)&mask)!=0 )  // CAS failed and the bits of interest have changed
            break;
        else                                 // CAS failed but the bits of interest left unchanged
            b.pause();
    }
    return T((result & mask) >> bitoffset);
}
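
/* Worked example (little-endian, S=1): for a byte at an address with
   (uintptr_t)ptr & 0x3 == 2, base points at the enclosing 4-byte word,
   bitoffset = 8*2 = 16, and mask = 0x00FF0000.  old_value and new_value
   splice comparand and value into bits 16..23 of that word, so the 4-byte
   CAS succeeds only if the byte of interest still equals comparand and the
   other three bytes have not changed since *base was read. */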

//! Generic compare-and-swap, dispatched on operand size S; a full fence is presumed.
template<size_t S, typename T>
inline T __TBB_CompareAndSwapGeneric (volatile void *ptr, T value, T comparand ) {
    return __TBB_CompareAndSwapW((T *)ptr,value,comparand);
}

//! 1-byte CAS: use the native primitive if available, otherwise emulate via Masked CAS.
template<>
inline uint8_t __TBB_CompareAndSwapGeneric <1,uint8_t> (volatile void *ptr, uint8_t value, uint8_t comparand ) {
#ifdef __TBB_CompareAndSwap1
    return __TBB_CompareAndSwap1(ptr,value,comparand);
#else
    return __TBB_MaskedCompareAndSwap<1,uint8_t>((volatile uint8_t *)ptr,value,comparand);
#endif
}

//! 2-byte CAS: use the native primitive if available, otherwise emulate via Masked CAS.
template<>
inline uint16_t __TBB_CompareAndSwapGeneric <2,uint16_t> (volatile void *ptr, uint16_t value, uint16_t comparand ) {
#ifdef __TBB_CompareAndSwap2
    return __TBB_CompareAndSwap2(ptr,value,comparand);
#else
    return __TBB_MaskedCompareAndSwap<2,uint16_t>((volatile uint16_t *)ptr,value,comparand);
#endif
}

template<>
inline uint32_t __TBB_CompareAndSwapGeneric <4,uint32_t> (volatile void *ptr, uint32_t value, uint32_t comparand ) {
    return __TBB_CompareAndSwap4(ptr,value,comparand);
}

template<>
inline uint64_t __TBB_CompareAndSwapGeneric <8,uint64_t> (volatile void *ptr, uint64_t value, uint64_t comparand ) {
    return __TBB_CompareAndSwap8(ptr,value,comparand);
}

//! Generic fetch-and-add, implemented as a CAS loop with exponential backoff.
template<size_t S, typename T>
inline T __TBB_FetchAndAddGeneric (volatile void *ptr, T addend) {
    AtomicBackoff b;
    T result;
    for(;;) {
        result = *reinterpret_cast<volatile T *>(ptr);
        // __TBB_CompareAndSwapGeneric presumed to have full fence.
        if( __TBB_CompareAndSwapGeneric<S,T> ( ptr, result+addend, result )==result )
            break;
        b.pause();
    }
    return result;
}
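
/* Usage sketch (counter is a hypothetical shared variable): atomically add
   10 to a 32-bit counter and obtain its previous value:

       volatile uint32_t counter;
       uint32_t old = __TBB_FetchAndAddGeneric<4,uint32_t>(&counter, 10u);
*/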

//! Generic fetch-and-store (exchange), implemented as a CAS loop with exponential backoff.
template<size_t S, typename T>
inline T __TBB_FetchAndStoreGeneric (volatile void *ptr, T value) {
    AtomicBackoff b;
    T result;
    for(;;) {
        result = *reinterpret_cast<volatile T *>(ptr);
        // __TBB_CompareAndSwapGeneric presumed to have full fence.
        if( __TBB_CompareAndSwapGeneric<S,T> ( ptr, value, result )==result )
            break;
        b.pause();
    }
    return result;
}

// Macro __TBB_TypeWithAlignmentAtLeastAsStrict(T) should expand to a type with alignment
// at least as strict as that of type T.  The type should have a trivial default constructor
// and destructor, so that arrays of it can be declared without initializers.
// It is correct (but perhaps a waste of space) if __TBB_TypeWithAlignmentAtLeastAsStrict(T)
// expands to a type bigger than T.
// The default definition here works on machines where integers are naturally aligned and the
// strictest alignment is 16.
#ifndef __TBB_TypeWithAlignmentAtLeastAsStrict

#if __GNUC__ || __SUNPRO_CC
struct __TBB_machine_type_with_strictest_alignment {
    int member[4];
} __attribute__((aligned(16)));
#elif _MSC_VER
__declspec(align(16)) struct __TBB_machine_type_with_strictest_alignment {
    int member[4];
};
#else
#error Must define __TBB_TypeWithAlignmentAtLeastAsStrict(T) or __TBB_machine_type_with_strictest_alignment
#endif

template<size_t N> struct type_with_alignment {__TBB_machine_type_with_strictest_alignment member;};
template<> struct type_with_alignment<1> { char member; };
template<> struct type_with_alignment<2> { uint16_t member; };
template<> struct type_with_alignment<4> { uint32_t member; };
template<> struct type_with_alignment<8> { uint64_t member; };

#if _MSC_VER||defined(__GNUC__)&&__GNUC__==3 && __GNUC_MINOR__<=2

//! Work around for bug in GNU 3.2 and MSVC compilers.
/** Bug is that compiler sometimes returns 0 for __alignof(T) when T has not yet been instantiated.
    The work-around forces instantiation by forcing computation of sizeof(T) before __alignof(T). */
template<size_t Size, typename T>
struct work_around_alignment_bug {
#if _MSC_VER
    static const size_t alignment = __alignof(T);
#else
    static const size_t alignment = __alignof__(T);
#endif
};
#define __TBB_TypeWithAlignmentAtLeastAsStrict(T) tbb::internal::type_with_alignment<tbb::internal::work_around_alignment_bug<sizeof(T),T>::alignment>
#elif __GNUC__ || __SUNPRO_CC
#define __TBB_TypeWithAlignmentAtLeastAsStrict(T) tbb::internal::type_with_alignment<__alignof__(T)>
#else
#define __TBB_TypeWithAlignmentAtLeastAsStrict(T) __TBB_machine_type_with_strictest_alignment
#endif
#endif  /* __TBB_TypeWithAlignmentAtLeastAsStrict */
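
/* Usage sketch: raw storage aligned at least as strictly as a type T,
   suitable for later placement-new of a T (T is hypothetical here):

       union aligned_space_for_T {
           __TBB_TypeWithAlignmentAtLeastAsStrict(T) aligner;
           char storage[sizeof(T)];
       };
*/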

} // namespace internal
} // namespace tbb

#ifndef __TBB_CompareAndSwap1
#define __TBB_CompareAndSwap1 tbb::internal::__TBB_CompareAndSwapGeneric<1,uint8_t>
#endif

#ifndef __TBB_CompareAndSwap2
#define __TBB_CompareAndSwap2 tbb::internal::__TBB_CompareAndSwapGeneric<2,uint16_t>
#endif

#ifndef __TBB_CompareAndSwapW
#define __TBB_CompareAndSwapW tbb::internal::__TBB_CompareAndSwapGeneric<sizeof(ptrdiff_t),ptrdiff_t>
#endif

#ifndef __TBB_FetchAndAdd1
#define __TBB_FetchAndAdd1 tbb::internal::__TBB_FetchAndAddGeneric<1,uint8_t>
#endif

#ifndef __TBB_FetchAndAdd2
#define __TBB_FetchAndAdd2 tbb::internal::__TBB_FetchAndAddGeneric<2,uint16_t>
#endif

#ifndef __TBB_FetchAndAdd4
#define __TBB_FetchAndAdd4 tbb::internal::__TBB_FetchAndAddGeneric<4,uint32_t>
#endif

#ifndef __TBB_FetchAndAdd8
#define __TBB_FetchAndAdd8 tbb::internal::__TBB_FetchAndAddGeneric<8,uint64_t>
#endif

#ifndef __TBB_FetchAndAddW
#define __TBB_FetchAndAddW tbb::internal::__TBB_FetchAndAddGeneric<sizeof(ptrdiff_t),ptrdiff_t>
#endif

#ifndef __TBB_FetchAndStore1
#define __TBB_FetchAndStore1 tbb::internal::__TBB_FetchAndStoreGeneric<1,uint8_t>
#endif

#ifndef __TBB_FetchAndStore2
#define __TBB_FetchAndStore2 tbb::internal::__TBB_FetchAndStoreGeneric<2,uint16_t>
#endif

#ifndef __TBB_FetchAndStore4
#define __TBB_FetchAndStore4 tbb::internal::__TBB_FetchAndStoreGeneric<4,uint32_t>
#endif

#ifndef __TBB_FetchAndStore8
#define __TBB_FetchAndStore8 tbb::internal::__TBB_FetchAndStoreGeneric<8,uint64_t>
#endif

#ifndef __TBB_FetchAndStoreW
#define __TBB_FetchAndStoreW tbb::internal::__TBB_FetchAndStoreGeneric<sizeof(ptrdiff_t),ptrdiff_t>
#endif

#if __TBB_DECL_FENCED_ATOMICS

#ifndef __TBB_CompareAndSwap1__TBB_full_fence
#define __TBB_CompareAndSwap1__TBB_full_fence __TBB_CompareAndSwap1
#endif
#ifndef __TBB_CompareAndSwap1acquire
#define __TBB_CompareAndSwap1acquire __TBB_CompareAndSwap1__TBB_full_fence
#endif
#ifndef __TBB_CompareAndSwap1release
#define __TBB_CompareAndSwap1release __TBB_CompareAndSwap1__TBB_full_fence
#endif

#ifndef __TBB_CompareAndSwap2__TBB_full_fence
#define __TBB_CompareAndSwap2__TBB_full_fence __TBB_CompareAndSwap2
#endif
#ifndef __TBB_CompareAndSwap2acquire
#define __TBB_CompareAndSwap2acquire __TBB_CompareAndSwap2__TBB_full_fence
#endif
#ifndef __TBB_CompareAndSwap2release
#define __TBB_CompareAndSwap2release __TBB_CompareAndSwap2__TBB_full_fence
#endif

#ifndef __TBB_CompareAndSwap4__TBB_full_fence
#define __TBB_CompareAndSwap4__TBB_full_fence __TBB_CompareAndSwap4
#endif
#ifndef __TBB_CompareAndSwap4acquire
#define __TBB_CompareAndSwap4acquire __TBB_CompareAndSwap4__TBB_full_fence
#endif
#ifndef __TBB_CompareAndSwap4release
#define __TBB_CompareAndSwap4release __TBB_CompareAndSwap4__TBB_full_fence
#endif

#ifndef __TBB_CompareAndSwap8__TBB_full_fence
#define __TBB_CompareAndSwap8__TBB_full_fence __TBB_CompareAndSwap8
#endif
#ifndef __TBB_CompareAndSwap8acquire
#define __TBB_CompareAndSwap8acquire __TBB_CompareAndSwap8__TBB_full_fence
#endif
#ifndef __TBB_CompareAndSwap8release
#define __TBB_CompareAndSwap8release __TBB_CompareAndSwap8__TBB_full_fence
#endif

#ifndef __TBB_FetchAndAdd1__TBB_full_fence
#define __TBB_FetchAndAdd1__TBB_full_fence __TBB_FetchAndAdd1
#endif
#ifndef __TBB_FetchAndAdd1acquire
#define __TBB_FetchAndAdd1acquire __TBB_FetchAndAdd1__TBB_full_fence
#endif
#ifndef __TBB_FetchAndAdd1release
#define __TBB_FetchAndAdd1release __TBB_FetchAndAdd1__TBB_full_fence
#endif

#ifndef __TBB_FetchAndAdd2__TBB_full_fence
#define __TBB_FetchAndAdd2__TBB_full_fence __TBB_FetchAndAdd2
#endif
#ifndef __TBB_FetchAndAdd2acquire
#define __TBB_FetchAndAdd2acquire __TBB_FetchAndAdd2__TBB_full_fence
#endif
#ifndef __TBB_FetchAndAdd2release
#define __TBB_FetchAndAdd2release __TBB_FetchAndAdd2__TBB_full_fence
#endif

#ifndef __TBB_FetchAndAdd4__TBB_full_fence
#define __TBB_FetchAndAdd4__TBB_full_fence __TBB_FetchAndAdd4
#endif
#ifndef __TBB_FetchAndAdd4acquire
#define __TBB_FetchAndAdd4acquire __TBB_FetchAndAdd4__TBB_full_fence
#endif
#ifndef __TBB_FetchAndAdd4release
#define __TBB_FetchAndAdd4release __TBB_FetchAndAdd4__TBB_full_fence
#endif

#ifndef __TBB_FetchAndAdd8__TBB_full_fence
#define __TBB_FetchAndAdd8__TBB_full_fence __TBB_FetchAndAdd8
#endif
#ifndef __TBB_FetchAndAdd8acquire
#define __TBB_FetchAndAdd8acquire __TBB_FetchAndAdd8__TBB_full_fence
#endif
#ifndef __TBB_FetchAndAdd8release
#define __TBB_FetchAndAdd8release __TBB_FetchAndAdd8__TBB_full_fence
#endif

#ifndef __TBB_FetchAndStore1__TBB_full_fence
#define __TBB_FetchAndStore1__TBB_full_fence __TBB_FetchAndStore1
#endif
#ifndef __TBB_FetchAndStore1acquire
#define __TBB_FetchAndStore1acquire __TBB_FetchAndStore1__TBB_full_fence
#endif
#ifndef __TBB_FetchAndStore1release
#define __TBB_FetchAndStore1release __TBB_FetchAndStore1__TBB_full_fence
#endif

#ifndef __TBB_FetchAndStore2__TBB_full_fence
#define __TBB_FetchAndStore2__TBB_full_fence __TBB_FetchAndStore2
#endif
#ifndef __TBB_FetchAndStore2acquire
#define __TBB_FetchAndStore2acquire __TBB_FetchAndStore2__TBB_full_fence
#endif
#ifndef __TBB_FetchAndStore2release
#define __TBB_FetchAndStore2release __TBB_FetchAndStore2__TBB_full_fence
#endif

#ifndef __TBB_FetchAndStore4__TBB_full_fence
#define __TBB_FetchAndStore4__TBB_full_fence __TBB_FetchAndStore4
#endif
#ifndef __TBB_FetchAndStore4acquire
#define __TBB_FetchAndStore4acquire __TBB_FetchAndStore4__TBB_full_fence
#endif
#ifndef __TBB_FetchAndStore4release
#define __TBB_FetchAndStore4release __TBB_FetchAndStore4__TBB_full_fence
#endif

#ifndef __TBB_FetchAndStore8__TBB_full_fence
#define __TBB_FetchAndStore8__TBB_full_fence __TBB_FetchAndStore8
#endif
#ifndef __TBB_FetchAndStore8acquire
#define __TBB_FetchAndStore8acquire __TBB_FetchAndStore8__TBB_full_fence
#endif
#ifndef __TBB_FetchAndStore8release
#define __TBB_FetchAndStore8release __TBB_FetchAndStore8__TBB_full_fence
#endif

#endif // __TBB_DECL_FENCED_ATOMICS

// Special atomic functions
#ifndef __TBB_FetchAndAddWrelease
#define __TBB_FetchAndAddWrelease __TBB_FetchAndAddW
#endif

#ifndef __TBB_FetchAndIncrementWacquire
#define __TBB_FetchAndIncrementWacquire(P) __TBB_FetchAndAddW(P,1)
#endif

#ifndef __TBB_FetchAndDecrementWrelease
#define __TBB_FetchAndDecrementWrelease(P) __TBB_FetchAndAddW(P,(-1))
#endif

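/* Usage sketch (refcount and cleanup() are hypothetical): a word-sized
   reference count using the increment/decrement macros above.  The macros
   return the previous value, so a returned 1 means the last reference was
   just dropped:

       ptrdiff_t refcount;                                  // initially 1
       __TBB_FetchAndIncrementWacquire(&refcount);          // take a reference
       if( __TBB_FetchAndDecrementWrelease(&refcount)==1 )  // drop the last one
           cleanup();
*/
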
#if __TBB_WORDSIZE==4
// On 32-bit platforms, "atomic.h" requires definition of __TBB_Store8 and __TBB_Load8
#ifndef __TBB_Store8
inline void __TBB_Store8 (volatile void *ptr, int64_t value) {
    tbb::internal::AtomicBackoff b;
    for(;;) {
        int64_t result = *(int64_t *)ptr;
        if( __TBB_CompareAndSwap8(ptr,value,result)==result ) break;
        b.pause();
    }
}
#endif

#ifndef __TBB_Load8
inline int64_t __TBB_Load8 (const volatile void *ptr) {
    // A plain 8-byte read may tear on a 32-bit platform.  The CAS below uses
    // identical comparand and new value, so memory is never changed, yet the
    // value returned is the full 64-bit content read atomically.
    int64_t result = *(int64_t *)ptr;
    result = __TBB_CompareAndSwap8((volatile void *)ptr,result,result);
    return result;
}
#endif
#endif /* __TBB_WORDSIZE==4 */
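
/* Usage sketch (last_seen and now() are hypothetical): an untorn 64-bit
   value shared between threads on a 32-bit build:

       volatile int64_t last_seen;
       __TBB_Store8(&last_seen, now());             // atomic 64-bit write
       int64_t snapshot = __TBB_Load8(&last_seen);  // atomic 64-bit read
*/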

#ifndef __TBB_Log2
//! Floor of the base-two logarithm of x, or -1 if x is 0.
inline intptr_t __TBB_Log2( uintptr_t x ) {
    if( x==0 ) return -1;
    intptr_t result = 0;
    uintptr_t tmp;
#if __TBB_WORDSIZE>=8
    if( (tmp = x>>32) ) { x=tmp; result += 32; }
#endif
    if( (tmp = x>>16) ) { x=tmp; result += 16; }
    if( (tmp = x>>8) )  { x=tmp; result += 8; }
    if( (tmp = x>>4) )  { x=tmp; result += 4; }
    if( (tmp = x>>2) )  { x=tmp; result += 2; }
    return (x&2)? result+1: result;
}
#endif
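
/* Worked example: __TBB_Log2(40).  40 is 101000 in binary.  The 16- and
   8-bit shifts yield zero and are skipped; the 4-bit shift leaves x=2
   (binary 10) with result=4; the 2-bit shift yields zero; finally x&2 is
   set, so the function returns 4+1 = 5 = floor(log2(40)). */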

#ifndef __TBB_AtomicOR
inline void __TBB_AtomicOR( volatile void *operand, uintptr_t addend ) {
    tbb::internal::AtomicBackoff b;
    for(;;) {
        uintptr_t tmp = *(volatile uintptr_t *)operand;
        uintptr_t result = __TBB_CompareAndSwapW(operand, tmp|addend, tmp);
        if( result==tmp ) break;
        b.pause();
    }
}
#endif

#ifndef __TBB_AtomicAND
inline void __TBB_AtomicAND( volatile void *operand, uintptr_t addend ) {
    tbb::internal::AtomicBackoff b;
    for(;;) {
        uintptr_t tmp = *(volatile uintptr_t *)operand;
        uintptr_t result = __TBB_CompareAndSwapW(operand, tmp&addend, tmp);
        if( result==tmp ) break;
        b.pause();
    }
}
#endif
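
/* Usage sketch (flags is a hypothetical word-sized bit mask): atomically
   set and clear individual bits:

       volatile uintptr_t flags;
       __TBB_AtomicOR( &flags, uintptr_t(1)<<2 );      // set bit 2
       __TBB_AtomicAND( &flags, ~(uintptr_t(1)<<2) );  // clear bit 2
*/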

#ifndef __TBB_TryLockByte
//! Try to acquire the byte lock: succeed iff flag was 0 and is now 1.
inline bool __TBB_TryLockByte( unsigned char &flag ) {
    return __TBB_CompareAndSwap1(&flag,1,0)==0;
}
#endif

#ifndef __TBB_LockByte
//! Acquire the byte lock, spinning with backoff until it is free.
inline uintptr_t __TBB_LockByte( unsigned char& flag ) {
    if ( !__TBB_TryLockByte(flag) ) {
        tbb::internal::AtomicBackoff b;
        do {
            b.pause();
        } while ( !__TBB_TryLockByte(flag) );
    }
    return 0;
}
#endif
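
/* Usage sketch: a minimal spin lock over a single byte (lock_flag is
   hypothetical and must be zero-initialized; unlocking is a release store):

       unsigned char lock_flag;                   // initially 0
       __TBB_LockByte(lock_flag);                 // acquire
       // ... critical section ...
       __TBB_store_with_release(lock_flag, 0);    // release
*/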

#endif /* __TBB_machine_H */
