Intel(R) Threading Building Blocks Doxygen Documentation
version 4.2.3
59  while( !try_acquire_internal_lock() ) {
76  wait_for_release_of_internal_lock();
78  release_internal_lock();
81  #if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
83  #pragma warning (push)
84  #pragma warning (disable: 4311 4312)
93  template<memory_semantics M>
95  return reinterpret_cast<T*>( atomic_traits<sizeof(T*),M>::fetch_and_add(location, addend) );
97  template<memory_semantics M>
99  return reinterpret_cast<T*>( atomic_traits<sizeof(T*),M>::fetch_and_store(location, reinterpret_cast<word>(value)) );
101 template<memory_semantics M>
103 return reinterpret_cast<T*>(
105 reinterpret_cast<word>(comparand))
113 return reinterpret_cast<T*>( reinterpret_cast<word>(ref) & operand2 );
116 return reinterpret_cast<T*>( reinterpret_cast<word>(ref) | operand2 );
122 #if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
124 #pragma warning (pop)
132 return uintptr_t(ptr) & FLAG;
142 __TBB_ASSERT( !my_mutex, "scoped_lock is already holding a mutex");
171 bool sync_prepare_done = false;
174 unsigned short pred_state;
176 if( uintptr_t(pred) & FLAG ) {
194 sync_prepare_done = true;
205 if( !sync_prepare_done )
228 __TBB_ASSERT( !my_mutex, "scoped_lock is already holding a mutex");
230 if( load<relaxed>(m.q_tail) )
268 if( this == my_mutex->q_tail.compare_and_swap<tbb::release>(NULL, this) ) {
278 acquire_internal_lock();
282 unblock_or_wait_on_internal_lock(get_flag(tmp));
302 tmp = tricky_pointer::compare_and_swap<tbb::release>(&my_prev, pred, tricky_pointer(pred)|FLAG );
303 if( !(uintptr_t(tmp) & FLAG) ) {
316 acquire_internal_lock();
328 tmp = tricky_pointer::fetch_and_store<tbb::release>(&(l_next->my_prev), pred);
337 acquire_internal_lock();
340 if( this != my_mutex->q_tail.compare_and_swap<tbb::release>(NULL, this) ) {
348 tmp = tricky_pointer::fetch_and_store<tbb::release>(&(n->my_prev), NULL);
352 unblock_or_wait_on_internal_lock(get_flag(tmp));
368 if( this==my_mutex->q_tail.load<full_fence>() ) {
398 acquire_internal_lock();
402 n = tricky_pointer::fetch_and_add<tbb::acquire>(&my_next, FLAG);
403 unsigned short n_state = n->my_state;
407 tmp = tricky_pointer::fetch_and_store<tbb::release>(&(n->my_prev), this);
408 unblock_or_wait_on_internal_lock(get_flag(tmp));
428 release_internal_lock();
440 pred = tricky_pointer::fetch_and_add<tbb::acquire>(&my_prev, FLAG);
442 bool success = pred->try_acquire_internal_lock();
445 tmp = tricky_pointer::compare_and_swap<tbb::release>(&my_prev, pred, tricky_pointer(pred)|FLAG );
446 if( uintptr_t(tmp) & FLAG ) {
451 pred->release_internal_lock();
455 pred->release_internal_lock();
469 wait_for_release_of_internal_lock();
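The fragments above rely on storing a one-bit flag in the low-order bit of an aligned pointer (see FLAG, get_flag and tricky_atomic_pointer below). The following standalone sketch illustrates that technique with std::atomic; the names Node, kFlag, tag and untag are illustrative only, and it uses fetch_or where the listing uses fetch_and_add of FLAG (equivalent when the bit is known to be clear).

    // Standalone sketch of low-order-bit pointer tagging (not TBB code).
    #include <atomic>
    #include <cstdint>
    #include <cassert>

    struct Node { int payload; };                 // alignment >= 2, so bit 0 of a Node* is free

    constexpr std::uintptr_t kFlag = 1;           // plays the role of FLAG (hypothetical name)

    // Set the flag bit on the stored pointer and return the previous raw value.
    std::uintptr_t tag(std::atomic<std::uintptr_t>& slot) {
        return slot.fetch_or(kFlag, std::memory_order_acquire);
    }

    // Strip the flag bit to recover the usable Node*.
    Node* untag(std::uintptr_t raw) {
        return reinterpret_cast<Node*>(raw & ~kFlag);
    }

    int main() {
        Node n{42};
        std::atomic<std::uintptr_t> slot{reinterpret_cast<std::uintptr_t>(&n)};

        std::uintptr_t prev = tag(slot);            // mark the link "in use"
        assert((prev & kFlag) == 0);                // nobody had tagged it before us
        assert(untag(slot.load())->payload == 42);  // the pointer itself is still usable
        return 0;
    }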
tricky_atomic_pointer< queuing_rw_mutex::scoped_lock > tricky_pointer
T __TBB_load_with_acquire(const volatile T &location)
void release_internal_lock()
Release the internal lock.
bool try_acquire_internal_lock()
Try to acquire the internal lock.
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
void acquire_internal_lock()
Acquire the internal lock.
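A minimal sketch of what such a one-byte internal lock can look like, assuming std::atomic and a yield-based pause; this illustrates the technique only and is not the TBB implementation (which spins on try_acquire_internal_lock with __TBB_Pause(1), as at line 59 of the listing above).

    // Sketch of a tiny byte lock with try/acquire/release/wait operations.
    #include <atomic>
    #include <thread>

    class TinyLock {
        static constexpr unsigned char RELEASED = 0, ACQUIRED = 1;
        std::atomic<unsigned char> state{RELEASED};
    public:
        bool try_acquire() {
            unsigned char expected = RELEASED;
            return state.compare_exchange_strong(expected, ACQUIRED,
                                                 std::memory_order_acquire);
        }
        void acquire() {                       // spin until try_acquire succeeds,
            while (!try_acquire())             // pausing briefly each iteration
                std::this_thread::yield();     // (stands in for __TBB_Pause(1))
        }
        void release() { state.store(RELEASED, std::memory_order_release); }
        void wait_for_release() {              // spin until another thread releases
            while (state.load(std::memory_order_acquire) != RELEASED)
                std::this_thread::yield();
        }
    };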
Class that implements exponential backoff.
The scoped locking pattern.
#define ITT_NOTIFY(name, obj)
Base class for types that should not be copied or assigned.
STATE_READER_UNBLOCKNEXT
scoped_lock *__TBB_atomic my_next
STATE_COMBINED_WAITINGREADER
atomic_selector< sizeof(T *)>::word word
atomic< scoped_lock * > q_tail
The last competitor requesting the lock.
uintptr_t get_flag(queuing_rw_mutex::scoped_lock *ptr)
void spin_wait_while_eq(const volatile T &location, U value)
Spin WHILE the value of the variable is equal to a given value.
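A hedged sketch of this "spin WHILE equal" helper combined with exponential backoff (see the backoff class above and spin_wait_until_eq below); the function name and yield-based pause are illustrative, not the TBB internals.

    #include <atomic>
    #include <thread>

    template<typename T, typename U>
    void spin_wait_while_eq_sketch(const std::atomic<T>& location, U value) {
        int pauses = 1;                              // exponential backoff counter
        while (location.load(std::memory_order_acquire) == static_cast<T>(value)) {
            for (int i = 0; i < pauses; ++i)
                std::this_thread::yield();           // stands in for a CPU pause
            if (pauses < 16) pauses *= 2;            // cap the growth, as typical backoff does
        }
    }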
unsigned char my_internal_lock
A tiny internal lock.
scoped_lock *__TBB_atomic my_prev
The pointer to the previous and next competitors for a mutex.
bool upgrade_to_writer()
Upgrade reader to become a writer.
full_fence
Sequential consistency.
const unsigned char RELEASED
static T * fetch_and_add(T *volatile *location, word addend)
atomic< state_t > my_state
State of the request: reader, writer, active reader, other service states.
state_t_flags
Flag bits in a state_t that specify information about a locking request.
Queuing reader-writer mutex with local-only spinning.
void __TBB_store_relaxed(volatile T &location, V value)
static T * fetch_and_store(T *volatile *location, T *value)
#define ITT_SYNC_CREATE(obj, type, name)
void __TBB_store_with_release(volatile T &location, V value)
STATE_COMBINED_UPGRADING
void unblock_or_wait_on_internal_lock(uintptr_t)
A helper function: waits for the internal lock to be released if the flag is set, otherwise releases it.
STATE_UPGRADE_REQUESTED
#define __TBB_control_consistency_helper()
#define _T(string_literal)
Standard Windows-style macro to mark up string literals.
void wait_for_release_of_internal_lock()
Wait for internal lock to be released.
static const tricky_pointer::word FLAG
Mask for the low-order bit of a pointer.
void __TBB_EXPORTED_METHOD internal_construct()
void __TBB_Pause(int32_t)
const unsigned char ACQUIRED
A view of a T* with additional functionality for twiddling low-order bits.
atomic< T > & as_atomic(T &t)
T * operator|(word operand2) const
unsigned char __TBB_atomic my_going
The local spin-wait variable.
bool downgrade_to_reader()
Downgrade writer to become a reader.
void spin_wait_until_eq(const volatile T &location, const U value)
Spin UNTIL the value of the variable is equal to a given value.
bool try_acquire(queuing_rw_mutex &m, bool write=true)
Acquire lock on the given mutex if it is free (i.e., non-blocking).
tricky_atomic_pointer(T *&original)
void acquire(queuing_rw_mutex &m, bool write=true)
Acquire lock on given mutex.
void release()
Release lock.
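The scoped_lock members above follow the scoped locking pattern. A brief usage sketch of acquiring as a reader and upgrading to a writer; the shared data and threshold logic are hypothetical, while the mutex API is as declared above.

    #include "tbb/queuing_rw_mutex.h"

    tbb::queuing_rw_mutex table_mutex;
    int shared_value = 0;                    // hypothetical shared data

    void read_then_maybe_write(int threshold) {
        // Acquire as reader (write=false); spinning is local to this lock object.
        tbb::queuing_rw_mutex::scoped_lock lock(table_mutex, /*write=*/false);
        if (shared_value < threshold) {
            // Become a writer; if upgrade_to_writer() returns false the lock was
            // temporarily released, so the condition must be re-checked.
            if (!lock.upgrade_to_writer() && shared_value >= threshold)
                return;                      // someone else got there first
            shared_value = threshold;
        }
    }   // lock released by the scoped_lock destructor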
T __TBB_load_relaxed(const volatile T &location)
static T * compare_and_swap(T *volatile *location, T *value, T *comparand)
T * operator&(word operand2) const
tricky_atomic_pointer(T *volatile &original)
Copyright © 2005-2020 Intel Corporation. All Rights Reserved.
Intel, Pentium, Intel Xeon, Itanium, Intel XScale and VTune are
registered trademarks or trademarks of Intel Corporation or its
subsidiaries in the United States and other countries.
* Other names and brands may be claimed as the property of others.