
tbb_machine.h

/*
    Copyright 2005-2008 Intel Corporation.  All Rights Reserved.

    The source code contained or described herein and all documents related
    to the source code ("Material") are owned by Intel Corporation or its
    suppliers or licensors.  Title to the Material remains with Intel
    Corporation or its suppliers and licensors.  The Material is protected
    by worldwide copyright laws and treaty provisions.  No part of the
    Material may be used, copied, reproduced, modified, published, uploaded,
    posted, transmitted, distributed, or disclosed in any way without
    Intel's prior express written permission.

    No license under any patent, copyright, trade secret or other
    intellectual property right is granted to or conferred upon you by
    disclosure or delivery of the Materials, either expressly, by
    implication, inducement, estoppel or otherwise.  Any license under such
    intellectual property rights must be express and approved by Intel in
    writing.
*/

#ifndef __TBB_machine_H
#define __TBB_machine_H

#include "tbb/tbb_stddef.h"

#if _WIN32||_WIN64

#ifdef _MANAGED
#pragma managed(push, off)
#endif

#if defined(_M_IX86)
#include "tbb/machine/windows_ia32.h"
#elif defined(_M_AMD64)
#include "tbb/machine/windows_em64t.h"
#else
#error Unsupported platform
#endif

#ifdef _MANAGED
#pragma managed(pop)
#endif

#elif __linux__ || __FreeBSD__

#if __i386__
#include "tbb/machine/linux_ia32.h"
#elif __x86_64__
#include "tbb/machine/linux_em64t.h"
#elif __ia64__
#include "tbb/machine/linux_itanium.h"
#endif

#elif __APPLE__

#if __i386__
#include "tbb/machine/linux_ia32.h"
#elif __x86_64__
#include "tbb/machine/linux_em64t.h"
#elif __POWERPC__
#include "tbb/machine/mac_ppc.h"
#endif

#elif _AIX

#include "tbb/machine/ibm_aix51.h"

#elif __sun || __SUNPRO_CC

#define __asm__ asm
#define __volatile__ volatile
#if __i386 || __i386__
#include "tbb/machine/linux_ia32.h"
#elif __x86_64__
#include "tbb/machine/linux_em64t.h"
#endif

#endif

#if !defined(__TBB_CompareAndSwap4) || !defined(__TBB_CompareAndSwap8) || !defined(__TBB_Yield)
#error Minimal requirements for tbb_machine.h not satisfied
#endif

#ifndef __TBB_load_with_acquire
    // Load with acquire semantics: if a port defines __TBB_fence_for_acquire,
    // the fence keeps subsequent memory operations from moving before the load.
    template<typename T>
    inline T __TBB_load_with_acquire(const volatile T& location) {
        T temp = location;
#ifdef __TBB_fence_for_acquire
        __TBB_fence_for_acquire();
#endif /* __TBB_fence_for_acquire */
        return temp;
    }
#endif

#ifndef __TBB_store_with_release
    // Store with release semantics: if a port defines __TBB_fence_for_release,
    // the fence keeps prior memory operations from moving past the store.
    template<typename T, typename V>
    inline void __TBB_store_with_release(volatile T& location, V value) {
#ifdef __TBB_fence_for_release
        __TBB_fence_for_release();
#endif /* __TBB_fence_for_release */
        location = value;
    }
#endif

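// A minimal sketch (illustrative only, not part of this header) of the
// producer/consumer handoff these two primitives support; the names "payload"
// and "ready" are hypothetical.
//
//     int payload;
//     unsigned char ready;
//
//     void producer() {
//         payload = 42;                          // ordinary store
//         __TBB_store_with_release(ready, 1u);   // publish: prior writes become visible first
//     }
//
//     void consumer() {
//         while( !__TBB_load_with_acquire(ready) ) {}  // spin until published
//         // here payload is guaranteed to read 42
//     }
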
#ifndef __TBB_Pause
    // Fallback for ports without a native pause instruction: ignore the
    // requested iteration count and yield instead.
    inline void __TBB_Pause(int32_t) {
        __TBB_Yield();
    }
#endif

namespace tbb {
namespace internal {

//! Class that implements exponential backoff.
class AtomicBackoff {
    //! Time delay, in units of "pause" instructions.
    static const int32_t LOOPS_BEFORE_YIELD = 16;
    int32_t count;
public:
    AtomicBackoff() : count(1) {}

    //! Pause for a while.
    void pause() {
        if( count<=LOOPS_BEFORE_YIELD ) {
            __TBB_Pause(count);
            // Pause twice as long the next time.
            count*=2;
        } else {
            // Pause is so long that we might as well yield CPU to scheduler.
            __TBB_Yield();
        }
    }

    //! Pause a bounded number of times; return false once the yield threshold
    //! is reached, so the caller can switch to a different waiting strategy.
    bool bounded_pause() {
        if( count<=LOOPS_BEFORE_YIELD ) {
            __TBB_Pause(count);
            // Pause twice as long the next time.
            count*=2;
            return true;
        } else {
            return false;
        }
    }

    void reset() {
        count = 1;
    }
};

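// A minimal usage sketch (illustrative only, not part of this header): spin
// until a flag is published, backing off exponentially. The function name and
// flag are hypothetical.
//
//     inline void spin_wait_until_set( const volatile unsigned char& flag ) {
//         tbb::internal::AtomicBackoff backoff;
//         while( !__TBB_load_with_acquire(flag) )
//             backoff.pause();
//     }
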
// Emulate compare-and-swap on a 1- or 2-byte location (S==1 or S==2) by doing
// a 4-byte compare-and-swap on the enclosing aligned 32-bit word.
template<size_t S, typename T>
inline T __TBB_MaskedCompareAndSwap (volatile int32_t *ptr, T value, T comparand ) {
    // Address of the aligned 32-bit word containing the location.
    T *base = (T *)( (uintptr_t)(ptr) & ~(uintptr_t)(0x3) );
#if __TBB_BIG_ENDIAN
    const uint8_t bitoffset = ( (4-S) - ( (uint8_t *)ptr - (uint8_t *)base) ) * 8;
#else
    const uint8_t bitoffset = ( (uint8_t *)ptr - (uint8_t *)base ) * 8;
#endif
    const uint32_t mask = ( (1<<(S*8) ) - 1)<<bitoffset;
    AtomicBackoff b;
    uint32_t result;
    for(;;) {
        result = *(volatile uint32_t *)base;
        uint32_t old_value = ( result & ~mask ) | ( comparand << bitoffset );
        uint32_t new_value = ( result & ~mask ) | ( value << bitoffset );
        // __TBB_CompareAndSwap4 presumed to have full fence.
        result = __TBB_CompareAndSwap4( base, new_value, old_value );
        if( result==old_value                 // CAS succeeded
            || ((result^old_value)&mask)!=0 ) // CAS failed, and the bytes of interest changed
            break;
        // CAS failed, but only the surrounding bytes changed; retry.
        b.pause();
    }
    return (T)((result & mask) >> bitoffset);
}

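// Worked example of the index arithmetic above (a note, not from the original
// comments): for S==1 and a byte at address base+2 on a little-endian machine,
// bitoffset is 2*8 == 16 and mask is ((1<<8)-1)<<16 == 0x00FF0000, so only
// byte 2 of the enclosing 32-bit word participates in the compare. On a
// big-endian machine the same byte maps to bitoffset ((4-1)-2)*8 == 8.
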
template<size_t S, typename T>
inline T __TBB_CompareAndSwapGeneric (volatile void *ptr, T value, T comparand ) {
    return __TBB_CompareAndSwapW((T *)ptr,value,comparand);
}

template<>
inline uint8_t __TBB_CompareAndSwapGeneric <1,uint8_t> (volatile void *ptr, uint8_t value, uint8_t comparand ) {
#ifdef __TBB_CompareAndSwap1
    return __TBB_CompareAndSwap1(ptr,value,comparand);
#else
    return __TBB_MaskedCompareAndSwap<1,uint8_t>((volatile int32_t *)ptr,value,comparand);
#endif
}

template<>
inline uint16_t __TBB_CompareAndSwapGeneric <2,uint16_t> (volatile void *ptr, uint16_t value, uint16_t comparand ) {
#ifdef __TBB_CompareAndSwap2
    return __TBB_CompareAndSwap2(ptr,value,comparand);
#else
    return __TBB_MaskedCompareAndSwap<2,uint16_t>((volatile int32_t *)ptr,value,comparand);
#endif
}

template<>
inline uint32_t __TBB_CompareAndSwapGeneric <4,uint32_t> (volatile void *ptr, uint32_t value, uint32_t comparand ) {
    return __TBB_CompareAndSwap4(ptr,value,comparand);
}

template<>
inline uint64_t __TBB_CompareAndSwapGeneric <8,uint64_t> (volatile void *ptr, uint64_t value, uint64_t comparand ) {
    return __TBB_CompareAndSwap8(ptr,value,comparand);
}

template<size_t S, typename T>
inline T __TBB_FetchAndAddGeneric (volatile void *ptr, T addend) {
    AtomicBackoff b;
    T result;
    for(;;) {
        result = *reinterpret_cast<volatile T *>(ptr);
        // __TBB_CompareAndSwapGeneric presumed to have full fence.
        if( __TBB_CompareAndSwapGeneric<S,T> ( ptr, result+addend, result )==result )
            break;
        b.pause();
    }
    return result;
}

template<size_t S, typename T>
inline T __TBB_FetchAndStoreGeneric (volatile void *ptr, T value) {
    AtomicBackoff b;
    T result;
    for(;;) {
        result = *reinterpret_cast<volatile T *>(ptr);
        // __TBB_CompareAndSwapGeneric presumed to have full fence.
        if( __TBB_CompareAndSwapGeneric<S,T> ( ptr, value, result )==result )
            break;
        b.pause();
    }
    return result;
}

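// A minimal sketch (illustrative only, not part of this header) of how these
// generic building blocks compose: atomically incrementing a 16-bit counter on
// a port without a native 2-byte fetch-and-add. The function name is
// hypothetical.
//
//     inline uint16_t increment_u16( volatile uint16_t& counter ) {
//         // Returns the value of the counter before the increment.
//         return tbb::internal::__TBB_FetchAndAddGeneric<2,uint16_t>(&counter, uint16_t(1));
//     }
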
// Macro __TBB_TypeWithAlignmentAtLeastAsStrict(T) should expand to a type whose
// alignment is at least as strict as that of type T. The type should have a
// trivial default constructor and destructor, so that arrays of it can be
// declared without initializers.
// It is correct (but perhaps a waste of space) if __TBB_TypeWithAlignmentAtLeastAsStrict(T)
// expands to a type bigger than T.
// The default definition here works on machines where integers are naturally
// aligned and the strictest alignment is 16.
#ifndef __TBB_TypeWithAlignmentAtLeastAsStrict

#if __GNUC__ || __SUNPRO_CC
struct __TBB_machine_type_with_strictest_alignment {
    int member[4];
} __attribute__((aligned(16)));
#elif _MSC_VER
__declspec(align(16)) struct __TBB_machine_type_with_strictest_alignment {
    int member[4];
};
#else
#error Must define __TBB_TypeWithAlignmentAtLeastAsStrict(T) or __TBB_machine_type_with_strictest_alignment
#endif

template<size_t N> struct type_with_alignment {__TBB_machine_type_with_strictest_alignment member;};
template<> struct type_with_alignment<1> { char member; };
template<> struct type_with_alignment<2> { uint16_t member; };
template<> struct type_with_alignment<4> { uint32_t member; };
template<> struct type_with_alignment<8> { uint64_t member; };

#if _MSC_VER || (defined(__GNUC__) && __GNUC__==3 && __GNUC_MINOR__<=2)
// These compilers mishandle alignof expressions used directly as template
// arguments, so the alignment is routed through a helper struct.
template<size_t Size, typename T>
struct work_around_alignment_bug {
#if _MSC_VER
    static const size_t alignment = __alignof(T);
#else
    static const size_t alignment = __alignof__(T);
#endif
};
#define __TBB_TypeWithAlignmentAtLeastAsStrict(T) tbb::internal::type_with_alignment<tbb::internal::work_around_alignment_bug<sizeof(T),T>::alignment>
#elif __GNUC__ || __SUNPRO_CC
#define __TBB_TypeWithAlignmentAtLeastAsStrict(T) tbb::internal::type_with_alignment<__alignof__(T)>
#else
#define __TBB_TypeWithAlignmentAtLeastAsStrict(T) __TBB_machine_type_with_strictest_alignment
#endif
#endif  /* __TBB_TypeWithAlignmentAtLeastAsStrict */

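// Illustrative use (a sketch, not part of this header): declaring raw storage
// suitably aligned for a T, e.g. as backing store for placement new. The union
// name is hypothetical.
//
//     template<typename T>
//     union storage_for {
//         __TBB_TypeWithAlignmentAtLeastAsStrict(T) aligner; // enforces alignment
//         char bytes[sizeof(T)];                             // enforces size
//     };
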
} // namespace internal
} // namespace tbb

#ifndef __TBB_CompareAndSwap1
#define __TBB_CompareAndSwap1 tbb::internal::__TBB_CompareAndSwapGeneric<1,uint8_t>
#endif

#ifndef __TBB_CompareAndSwap2
#define __TBB_CompareAndSwap2 tbb::internal::__TBB_CompareAndSwapGeneric<2,uint16_t>
#endif

#ifndef __TBB_CompareAndSwapW
#define __TBB_CompareAndSwapW tbb::internal::__TBB_CompareAndSwapGeneric<sizeof(ptrdiff_t),ptrdiff_t>
#endif

#ifndef __TBB_FetchAndAdd1
#define __TBB_FetchAndAdd1 tbb::internal::__TBB_FetchAndAddGeneric<1,uint8_t>
#endif

#ifndef __TBB_FetchAndAdd2
#define __TBB_FetchAndAdd2 tbb::internal::__TBB_FetchAndAddGeneric<2,uint16_t>
#endif

#ifndef __TBB_FetchAndAdd4
#define __TBB_FetchAndAdd4 tbb::internal::__TBB_FetchAndAddGeneric<4,uint32_t>
#endif

#ifndef __TBB_FetchAndAdd8
#define __TBB_FetchAndAdd8 tbb::internal::__TBB_FetchAndAddGeneric<8,uint64_t>
#endif

#ifndef __TBB_FetchAndAddW
#define __TBB_FetchAndAddW tbb::internal::__TBB_FetchAndAddGeneric<sizeof(ptrdiff_t),ptrdiff_t>
#endif

#ifndef __TBB_FetchAndStore1
#define __TBB_FetchAndStore1 tbb::internal::__TBB_FetchAndStoreGeneric<1,uint8_t>
#endif

#ifndef __TBB_FetchAndStore2
#define __TBB_FetchAndStore2 tbb::internal::__TBB_FetchAndStoreGeneric<2,uint16_t>
#endif

#ifndef __TBB_FetchAndStore4
#define __TBB_FetchAndStore4 tbb::internal::__TBB_FetchAndStoreGeneric<4,uint32_t>
#endif

#ifndef __TBB_FetchAndStore8
#define __TBB_FetchAndStore8 tbb::internal::__TBB_FetchAndStoreGeneric<8,uint64_t>
#endif

#ifndef __TBB_FetchAndStoreW
#define __TBB_FetchAndStoreW tbb::internal::__TBB_FetchAndStoreGeneric<sizeof(ptrdiff_t),ptrdiff_t>
#endif

#if __TBB_DECL_FENCED_ATOMICS

#ifndef __TBB_CompareAndSwap1__TBB_full_fence
#define __TBB_CompareAndSwap1__TBB_full_fence __TBB_CompareAndSwap1
#endif
#ifndef __TBB_CompareAndSwap1acquire
#define __TBB_CompareAndSwap1acquire __TBB_CompareAndSwap1__TBB_full_fence
#endif
#ifndef __TBB_CompareAndSwap1release
#define __TBB_CompareAndSwap1release __TBB_CompareAndSwap1__TBB_full_fence
#endif

#ifndef __TBB_CompareAndSwap2__TBB_full_fence
#define __TBB_CompareAndSwap2__TBB_full_fence __TBB_CompareAndSwap2
#endif
#ifndef __TBB_CompareAndSwap2acquire
#define __TBB_CompareAndSwap2acquire __TBB_CompareAndSwap2__TBB_full_fence
#endif
#ifndef __TBB_CompareAndSwap2release
#define __TBB_CompareAndSwap2release __TBB_CompareAndSwap2__TBB_full_fence
#endif

#ifndef __TBB_CompareAndSwap4__TBB_full_fence
#define __TBB_CompareAndSwap4__TBB_full_fence __TBB_CompareAndSwap4
#endif
#ifndef __TBB_CompareAndSwap4acquire
#define __TBB_CompareAndSwap4acquire __TBB_CompareAndSwap4__TBB_full_fence
#endif
#ifndef __TBB_CompareAndSwap4release
#define __TBB_CompareAndSwap4release __TBB_CompareAndSwap4__TBB_full_fence
#endif

#ifndef __TBB_CompareAndSwap8__TBB_full_fence
#define __TBB_CompareAndSwap8__TBB_full_fence __TBB_CompareAndSwap8
#endif
#ifndef __TBB_CompareAndSwap8acquire
#define __TBB_CompareAndSwap8acquire __TBB_CompareAndSwap8__TBB_full_fence
#endif
#ifndef __TBB_CompareAndSwap8release
#define __TBB_CompareAndSwap8release __TBB_CompareAndSwap8__TBB_full_fence
#endif

#ifndef __TBB_FetchAndAdd1__TBB_full_fence
#define __TBB_FetchAndAdd1__TBB_full_fence __TBB_FetchAndAdd1
#endif
#ifndef __TBB_FetchAndAdd1acquire
#define __TBB_FetchAndAdd1acquire __TBB_FetchAndAdd1__TBB_full_fence
#endif
#ifndef __TBB_FetchAndAdd1release
#define __TBB_FetchAndAdd1release __TBB_FetchAndAdd1__TBB_full_fence
#endif

#ifndef __TBB_FetchAndAdd2__TBB_full_fence
#define __TBB_FetchAndAdd2__TBB_full_fence __TBB_FetchAndAdd2
#endif
#ifndef __TBB_FetchAndAdd2acquire
#define __TBB_FetchAndAdd2acquire __TBB_FetchAndAdd2__TBB_full_fence
#endif
#ifndef __TBB_FetchAndAdd2release
#define __TBB_FetchAndAdd2release __TBB_FetchAndAdd2__TBB_full_fence
#endif

#ifndef __TBB_FetchAndAdd4__TBB_full_fence
#define __TBB_FetchAndAdd4__TBB_full_fence __TBB_FetchAndAdd4
#endif
#ifndef __TBB_FetchAndAdd4acquire
#define __TBB_FetchAndAdd4acquire __TBB_FetchAndAdd4__TBB_full_fence
#endif
#ifndef __TBB_FetchAndAdd4release
#define __TBB_FetchAndAdd4release __TBB_FetchAndAdd4__TBB_full_fence
#endif

#ifndef __TBB_FetchAndAdd8__TBB_full_fence
#define __TBB_FetchAndAdd8__TBB_full_fence __TBB_FetchAndAdd8
#endif
#ifndef __TBB_FetchAndAdd8acquire
#define __TBB_FetchAndAdd8acquire __TBB_FetchAndAdd8__TBB_full_fence
#endif
#ifndef __TBB_FetchAndAdd8release
#define __TBB_FetchAndAdd8release __TBB_FetchAndAdd8__TBB_full_fence
#endif

#ifndef __TBB_FetchAndStore1__TBB_full_fence
#define __TBB_FetchAndStore1__TBB_full_fence __TBB_FetchAndStore1
#endif
#ifndef __TBB_FetchAndStore1acquire
#define __TBB_FetchAndStore1acquire __TBB_FetchAndStore1__TBB_full_fence
#endif
#ifndef __TBB_FetchAndStore1release
#define __TBB_FetchAndStore1release __TBB_FetchAndStore1__TBB_full_fence
#endif

#ifndef __TBB_FetchAndStore2__TBB_full_fence
#define __TBB_FetchAndStore2__TBB_full_fence __TBB_FetchAndStore2
#endif
#ifndef __TBB_FetchAndStore2acquire
#define __TBB_FetchAndStore2acquire __TBB_FetchAndStore2__TBB_full_fence
#endif
#ifndef __TBB_FetchAndStore2release
#define __TBB_FetchAndStore2release __TBB_FetchAndStore2__TBB_full_fence
#endif

#ifndef __TBB_FetchAndStore4__TBB_full_fence
#define __TBB_FetchAndStore4__TBB_full_fence __TBB_FetchAndStore4
#endif
#ifndef __TBB_FetchAndStore4acquire
#define __TBB_FetchAndStore4acquire __TBB_FetchAndStore4__TBB_full_fence
#endif
#ifndef __TBB_FetchAndStore4release
#define __TBB_FetchAndStore4release __TBB_FetchAndStore4__TBB_full_fence
#endif

#ifndef __TBB_FetchAndStore8__TBB_full_fence
#define __TBB_FetchAndStore8__TBB_full_fence __TBB_FetchAndStore8
#endif
#ifndef __TBB_FetchAndStore8acquire
#define __TBB_FetchAndStore8acquire __TBB_FetchAndStore8__TBB_full_fence
#endif
#ifndef __TBB_FetchAndStore8release
#define __TBB_FetchAndStore8release __TBB_FetchAndStore8__TBB_full_fence
#endif

#endif // __TBB_DECL_FENCED_ATOMICS

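// Note (an observation, not from the original comments): each fenced variant
// above defaults to the corresponding full-fence primitive, which is always
// correct though potentially over-synchronized. For example, on a port that
// defines no fenced variants, both of these expand to the same call:
//
//     __TBB_CompareAndSwap4acquire(p, 1, 0);  // -> __TBB_CompareAndSwap4(p, 1, 0)
//     __TBB_CompareAndSwap4release(p, 1, 0);  // -> __TBB_CompareAndSwap4(p, 1, 0)
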
// Special atomic functions
#ifndef __TBB_FetchAndAddWrelease
#define __TBB_FetchAndAddWrelease __TBB_FetchAndAddW
#endif

#ifndef __TBB_FetchAndIncrementWacquire
#define __TBB_FetchAndIncrementWacquire(P) __TBB_FetchAndAddW(P,1)
#endif

#ifndef __TBB_FetchAndDecrementWrelease
#define __TBB_FetchAndDecrementWrelease(P) __TBB_FetchAndAddW(P,(-1))
#endif

#if __TBB_WORDSIZE==4
// On 32-bit platforms, "atomic.h" requires definition of __TBB_Store8 and __TBB_Load8
#ifndef __TBB_Store8
inline void __TBB_Store8 (volatile void *ptr, int64_t value) {
    tbb::internal::AtomicBackoff b;
    for(;;) {
        int64_t result = *(volatile int64_t *)ptr;
        if( __TBB_CompareAndSwap8(ptr,value,result)==result ) break;
        b.pause();
    }
}
#endif

#ifndef __TBB_Load8
inline int64_t __TBB_Load8 (const volatile void *ptr) {
    // The initial read is only a guess for the comparand; the compare-and-swap
    // below returns the actual value atomically.
    int64_t result = *(volatile int64_t *)ptr;
    result = __TBB_CompareAndSwap8((volatile void *)ptr,result,result);
    return result;
}
#endif
#endif /* __TBB_WORDSIZE==4 */

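// Why __TBB_Load8 works as an atomic 64-bit load (a note, not from the
// original comments): __TBB_CompareAndSwap8(ptr,result,result) either finds
// the guessed value and stores the identical value back, or fails and leaves
// memory untouched. Either way memory is unchanged, and the returned value is
// all 8 bytes read atomically, which a plain load cannot guarantee on a
// 32-bit machine.
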
#ifndef __TBB_Log2
// Portable fallback: position of the most significant set bit of x, i.e.
// floor(log2(x)); returns -1 when x is 0.
inline intptr_t __TBB_Log2( uintptr_t x ) {
    long result = -1;
    for(; x; x>>=1 ) ++result;
    return result;
}
#endif

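// Worked examples (a note, not from the original comments): for x==8 the loop
// body runs four times (x = 8, 4, 2, 1), taking result from -1 to 3, so
// __TBB_Log2(8)==3; similarly __TBB_Log2(1)==0, __TBB_Log2(3)==1, and
// __TBB_Log2(0)==-1.
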
#ifndef __TBB_AtomicOR
// Atomically OR the given bits into the word at operand, via a CAS loop.
inline void __TBB_AtomicOR( volatile void *operand, uintptr_t addend ) {
    tbb::internal::AtomicBackoff b;
    for(;;) {
        uintptr_t tmp = *(volatile uintptr_t *)operand;
        uintptr_t result = __TBB_CompareAndSwapW(operand, tmp|addend, tmp);
        if( result==tmp ) break;
        b.pause();
    }
}
#endif

#ifndef __TBB_AtomicAND
// Atomically AND the given mask into the word at operand, via a CAS loop.
inline void __TBB_AtomicAND( volatile void *operand, uintptr_t addend ) {
    tbb::internal::AtomicBackoff b;
    for(;;) {
        uintptr_t tmp = *(volatile uintptr_t *)operand;
        uintptr_t result = __TBB_CompareAndSwapW(operand, tmp&addend, tmp);
        if( result==tmp ) break;
        b.pause();
    }
}
#endif

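// A minimal usage sketch (illustrative only; the flag word and bit values are
// hypothetical):
//
//     volatile uintptr_t flags = 0;
//     __TBB_AtomicOR(&flags, 0x4);                // atomically set bit 2
//     __TBB_AtomicAND(&flags, ~uintptr_t(0x4));   // atomically clear bit 2
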
#ifndef __TBB_TryLockByte
// Try to acquire a byte-sized lock: succeeds iff the flag was 0 and is now 1.
inline bool __TBB_TryLockByte( unsigned char &flag ) {
    return __TBB_CompareAndSwap1(&flag,1,0)==0;
}
#endif

#ifndef __TBB_LockByte
// Acquire a byte-sized lock, spinning with exponential backoff until it is free.
inline uintptr_t __TBB_LockByte( unsigned char& flag ) {
    if ( !__TBB_TryLockByte(flag) ) {
        tbb::internal::AtomicBackoff b;
        do {
            b.pause();
        } while ( !__TBB_TryLockByte(flag) );
    }
    return 0;
}
#endif

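// A minimal spin-lock sketch built on these primitives (illustrative only,
// not part of this header); the unlock uses __TBB_store_with_release so that
// writes made inside the critical section are published before the flag clears.
//
//     unsigned char my_lock = 0;   // hypothetical lock byte, 0 == unlocked
//
//     void critical_section() {
//         __TBB_LockByte(my_lock);                               // acquire
//         /* ... protected work ... */
//         __TBB_store_with_release(my_lock, (unsigned char)0);   // release
//     }
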
#endif /* __TBB_machine_H */
