mirror of
https://github.com/gperftools/gperftools
synced 2025-02-28 01:50:28 +00:00
Fix thread-safety (annotalysis) annotations
tcmalloc contains some thread-safety annotations; however those annotations have not been exercised for some time, as they used macros/attributes only supported by a legacy branch of gcc. Pull request #1251 converted those macros to support modern versions of clang; this CR fixes the annotations that were enabled. For the most part, this just requires re-enabling annotations on member functions that take/release locks. For the tcmalloc fork (pre-fork and post-fork) handlers, we mark the functions as exempt from this analysis, as it takes a dynamic number of locks.
This commit is contained in:
parent
cc496aecb8
commit
fa412adfe3
@ -248,7 +248,7 @@ namespace {
|
||||
this->arena_->mu.Lock();
|
||||
}
|
||||
~ArenaLock() { RAW_CHECK(this->left_, "haven't left Arena region"); }
|
||||
void Leave() /*UNLOCK_FUNCTION()*/ {
|
||||
void Leave() UNLOCK_FUNCTION() {
|
||||
this->arena_->mu.Unlock();
|
||||
#if 0
|
||||
if (this->mask_valid_) {
|
||||
|
@ -63,9 +63,7 @@ class LOCKABLE SpinLock {
|
||||
}
|
||||
|
||||
// Acquire this SpinLock.
|
||||
// TODO(csilvers): uncomment the annotation when we figure out how to
|
||||
// support this macro with 0 args (see thread_annotations.h)
|
||||
inline void Lock() /*EXCLUSIVE_LOCK_FUNCTION()*/ {
|
||||
inline void Lock() EXCLUSIVE_LOCK_FUNCTION() {
|
||||
if (base::subtle::Acquire_CompareAndSwap(&lockword_, kSpinLockFree,
|
||||
kSpinLockHeld) != kSpinLockFree) {
|
||||
SlowLock();
|
||||
@ -84,9 +82,7 @@ class LOCKABLE SpinLock {
|
||||
}
|
||||
|
||||
// Release this SpinLock, which must be held by the calling thread.
|
||||
// TODO(csilvers): uncomment the annotation when we figure out how to
|
||||
// support this macro with 0 args (see thread_annotations.h)
|
||||
inline void Unlock() /*UNLOCK_FUNCTION()*/ {
|
||||
inline void Unlock() UNLOCK_FUNCTION() {
|
||||
uint64 prev_value = static_cast<uint64>(
|
||||
base::subtle::Release_AtomicExchange(&lockword_, kSpinLockFree));
|
||||
if (prev_value != kSpinLockHeld) {
|
||||
@ -127,9 +123,7 @@ class SCOPED_LOCKABLE SpinLockHolder {
|
||||
: lock_(l) {
|
||||
l->Lock();
|
||||
}
|
||||
// TODO(csilvers): uncomment the annotation when we figure out how to
|
||||
// support this macro with 0 args (see thread_annotations.h)
|
||||
inline ~SpinLockHolder() /*UNLOCK_FUNCTION()*/ { lock_->Unlock(); }
|
||||
inline ~SpinLockHolder() UNLOCK_FUNCTION() { lock_->Unlock(); }
|
||||
};
|
||||
// Catch bug where variable name is omitted, e.g. SpinLockHolder (&lock);
|
||||
#define SpinLockHolder(x) COMPILE_ASSERT(0, spin_lock_decl_missing_var_name)
|
||||
|
@ -82,11 +82,11 @@ class CentralFreeList {
|
||||
|
||||
// Lock/Unlock the internal SpinLock. Used on the pthread_atfork call
|
||||
// to set the lock in a consistent state before the fork.
|
||||
void Lock() {
|
||||
void Lock() EXCLUSIVE_LOCK_FUNCTION(lock_) {
|
||||
lock_.Lock();
|
||||
}
|
||||
|
||||
void Unlock() {
|
||||
void Unlock() UNLOCK_FUNCTION(lock_) {
|
||||
lock_.Unlock();
|
||||
}
|
||||
|
||||
|
@ -52,14 +52,14 @@ namespace tcmalloc {
|
||||
// sure the central_cache locks remain in a consistent state in the forked
|
||||
// version of the thread.
|
||||
|
||||
void CentralCacheLockAll()
|
||||
void CentralCacheLockAll() NO_THREAD_SAFETY_ANALYSIS
|
||||
{
|
||||
Static::pageheap_lock()->Lock();
|
||||
for (int i = 0; i < Static::num_size_classes(); ++i)
|
||||
Static::central_cache()[i].Lock();
|
||||
}
|
||||
|
||||
void CentralCacheUnlockAll()
|
||||
void CentralCacheUnlockAll() NO_THREAD_SAFETY_ANALYSIS
|
||||
{
|
||||
for (int i = 0; i < Static::num_size_classes(); ++i)
|
||||
Static::central_cache()[i].Unlock();
|
||||
|
Loading…
Reference in New Issue
Block a user