From 54eeb8ee9a57850aca5056a5834bfb86a210bf1a Mon Sep 17 00:00:00 2001
From: Tom
Date: Tue, 29 Dec 2020 13:14:21 -0700
Subject: [PATCH] AK: Fix a race condition with WeakPtr::strong_ref and destruction

Since RefPtr decrements the ref counter to 0 before it starts destroying
the object, there is a window where the ref count is 0 but the weak
references have not yet been revoked.

Also change WeakLink so that a strong reference can be obtained
concurrently, and block revocation instead, which should happen much
less often.

Fixes a problem observed in #4621
---
 AK/RefCounted.h                  | 16 ++++++++-
 AK/WeakPtr.h                     | 32 ++++++++++++++---
 AK/Weakable.h                    | 59 ++++++++++++++++++++++----------
 Libraries/LibGUI/Application.cpp |  5 +++
 4 files changed, 88 insertions(+), 24 deletions(-)

diff --git a/AK/RefCounted.h b/AK/RefCounted.h
index d18395904b..17d6330f00 100644
--- a/AK/RefCounted.h
+++ b/AK/RefCounted.h
@@ -74,6 +74,18 @@ public:
         ASSERT(!Checked<RefCountType>::addition_would_overflow(old_ref_count, 1));
     }
 
+    ALWAYS_INLINE bool try_ref() const
+    {
+        RefCountType expected = m_ref_count.load(AK::MemoryOrder::memory_order_relaxed);
+        for (;;) {
+            if (expected == 0)
+                return false;
+            ASSERT(!Checked<RefCountType>::addition_would_overflow(expected, 1));
+            if (m_ref_count.compare_exchange_strong(expected, expected + 1, AK::MemoryOrder::memory_order_acquire))
+                return true;
+        }
+    }
+
     ALWAYS_INLINE RefCountType ref_count() const
     {
         return m_ref_count.load(AK::MemoryOrder::memory_order_relaxed);
@@ -99,15 +111,17 @@ protected:
 template<typename T>
 class RefCounted : public RefCountedBase {
 public:
-    void unref() const
+    bool unref() const
     {
         auto new_ref_count = deref_base();
         if (new_ref_count == 0) {
             call_will_be_destroyed_if_present(static_cast<const T*>(this));
             delete static_cast<const T*>(this);
+            return true;
         } else if (new_ref_count == 1) {
             call_one_ref_left_if_present(static_cast<const T*>(this));
         }
+        return false;
     }
 };
 
diff --git a/AK/WeakPtr.h b/AK/WeakPtr.h
index 72d3b8af07..ed2bf470c4 100644
--- a/AK/WeakPtr.h
+++ b/AK/WeakPtr.h
@@ -201,16 +201,40 @@
 template<typename T>
 template<typename U>
 inline WeakPtr<U> Weakable<T>::make_weak_ptr() const
 {
-#ifdef DEBUG
-    ASSERT(!m_being_destroyed);
-#endif
+    if constexpr (IsBaseOf<RefCountedBase, T>::value) {
+        // Checking m_being_destroyed isn't sufficient when dealing with
+        // a RefCounted type. The reference count will drop to 0 before the
+        // destructor is invoked and revoke_weak_ptrs is called. So, try
+        // to add a ref (which should fail if the ref count is at 0) so
+        // that we prevent the destructor and revoke_weak_ptrs from being
+        // triggered until we're done.
+        if (!static_cast<const T*>(this)->try_ref())
+            return {};
+    } else {
+        // For non-RefCounted types this means a weak reference can be
+        // obtained until the ~Weakable destructor is invoked!
+        if (m_being_destroyed.load(AK::MemoryOrder::memory_order_acquire))
+            return {};
+    }
     if (!m_link) {
         // There is a small chance that we create a new WeakLink and throw
         // it away because another thread beat us to it. But the window is
         // pretty small and the overhead isn't terrible.
         m_link.assign_if_null(adopt(*new WeakLink(const_cast<T&>(static_cast<const T&>(*this)))));
     }
-    return WeakPtr<U>(m_link);
+
+    WeakPtr<U> weak_ptr(m_link);
+
+    if constexpr (IsBaseOf<RefCountedBase, T>::value) {
+        // Now drop the reference we temporarily added
+        if (static_cast<const T*>(this)->unref()) {
+            // We just dropped the last reference, which should have called
+            // revoke_weak_ptrs, which should have invalidated our weak_ptr
+            ASSERT(!weak_ptr.strong_ref());
+            return {};
+        }
+    }
+    return weak_ptr;
 }
 template<typename T>
diff --git a/AK/Weakable.h b/AK/Weakable.h
index 71c0ee9415..e598895ac9 100644
--- a/AK/Weakable.h
+++ b/AK/Weakable.h
@@ -30,6 +30,9 @@
 #include "Atomic.h"
 #include "RefCounted.h"
 #include "RefPtr.h"
+#ifdef KERNEL
+#    include <Kernel/Arch/i386/CPU.h>
+#endif
 
 #ifndef WEAKABLE_DEBUG
 #    define WEAKABLE_DEBUG
@@ -56,14 +59,16 @@ public:
 
         {
 #ifdef KERNEL
-            // We don't want to be pre-empted while we have the lock bit set
+            // We don't want to be pre-empted while we are trying to obtain
+            // a strong reference
             Kernel::ScopedCritical critical;
 #endif
-            FlatPtr bits = RefPtrTraits<void>::lock(m_bits);
-            T* ptr = static_cast<T*>(RefPtrTraits<void>::as_ptr(bits));
-            if (ptr)
-                ref = *ptr;
-            RefPtrTraits<void>::unlock(m_bits, bits);
+            if (!(m_consumers.fetch_add(1u << 1, AK::MemoryOrder::memory_order_acquire) & 1u)) {
+                T* ptr = (T*)m_ptr.load(AK::MemoryOrder::memory_order_acquire);
+                if (ptr && ptr->try_ref())
+                    ref = adopt(*ptr);
+            }
+            m_consumers.fetch_sub(1u << 1, AK::MemoryOrder::memory_order_release);
         }
 
         return ref;
@@ -72,26 +77,46 @@ public:
     template<typename T>
     T* unsafe_ptr() const
    {
-        return static_cast<T*>(RefPtrTraits<void>::as_ptr(m_bits.load(AK::MemoryOrder::memory_order_acquire)));
+        if (m_consumers.load(AK::MemoryOrder::memory_order_relaxed) & 1u)
+            return nullptr;
+        // NOTE: This may return a non-null pointer even if revocation
+        // has been triggered as there is a possible race! But it's "unsafe"
+        // anyway because we return a raw pointer without ensuring a
+        // reference...
+        return (T*)m_ptr.load(AK::MemoryOrder::memory_order_acquire);
     }
 
     bool is_null() const
     {
-        return RefPtrTraits<void>::is_null(m_bits.load(AK::MemoryOrder::memory_order_relaxed));
+        return !unsafe_ptr<void>();
     }
 
     void revoke()
     {
-        RefPtrTraits<void>::exchange(m_bits, RefPtrTraits<void>::default_null_value);
+        auto current_consumers = m_consumers.fetch_or(1u, AK::MemoryOrder::memory_order_relaxed);
+        ASSERT(!(current_consumers & 1u));
+        // We flagged revocation, now wait until everyone trying to obtain
+        // a strong reference is done
+        while (current_consumers > 0) {
+#ifdef KERNEL
+            Kernel::Processor::wait_check();
+#else
+            // TODO: yield?
+#endif
+            current_consumers = m_consumers.load(AK::MemoryOrder::memory_order_acquire) & ~1u;
+        }
+        // No one is trying to use it (anymore)
+        m_ptr.store(nullptr, AK::MemoryOrder::memory_order_release);
     }
 
 private:
     template<typename T>
     explicit WeakLink(T& weakable)
-        : m_bits(RefPtrTraits<void>::as_bits(&weakable))
+        : m_ptr(&weakable)
     {
     }
-    mutable Atomic<FlatPtr> m_bits;
+    mutable Atomic<void*> m_ptr;
+    mutable Atomic<unsigned> m_consumers; // LSB indicates revocation in progress
 };
 
 template<typename T>
@@ -108,23 +133,19 @@ protected:
 
     ~Weakable()
     {
-#ifdef WEAKABLE_DEBUG
-        m_being_destroyed = true;
-#endif
+        m_being_destroyed.store(true, AK::MemoryOrder::memory_order_release);
         revoke_weak_ptrs();
     }
 
     void revoke_weak_ptrs()
     {
-        if (m_link)
-            m_link->revoke();
+        if (auto link = move(m_link))
+            link->revoke();
     }
 
 private:
     mutable RefPtr<WeakLink> m_link;
-#ifdef WEAKABLE_DEBUG
-    bool m_being_destroyed { false };
-#endif
+    Atomic<bool> m_being_destroyed { false };
 };
 
 }
diff --git a/Libraries/LibGUI/Application.cpp b/Libraries/LibGUI/Application.cpp
index a70e7d5b39..55cc9373c9 100644
--- a/Libraries/LibGUI/Application.cpp
+++ b/Libraries/LibGUI/Application.cpp
@@ -72,6 +72,11 @@ static NeverDestroyed<WeakPtr<Application>> s_the;
 
 Application* Application::the()
 {
+    // NOTE: If we don't explicitly call revoke_weak_ptrs() in the
+    // ~Application destructor, we would have to change this to
+    //     return s_the->strong_ref().ptr();
+    // This is because this is using the unsafe operator*/operator->
+    // that do not have the ability to check the ref count!
     return *s_the;
 }
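Note (not part of the upstream patch): below is a minimal, single-threaded sketch of the synchronization scheme the patch introduces, written against std::atomic rather than AK::Atomic so it can be compiled and experimented with in isolation. The Object and Link types are hypothetical stand-ins for RefCountedBase/RefCounted and WeakLink; only the try_ref() CAS loop and the consumer-counter/LSB revocation protocol are modeled on the patch, everything else is simplified.

// Sketch only: hypothetical Object/Link types illustrating the patch's scheme.
#include <atomic>
#include <cassert>
#include <cstdio>
#include <thread>

struct Object {
    mutable std::atomic<unsigned> ref_count { 1 };

    // Like RefCountedBase::try_ref(): succeed only while the count is still
    // non-zero, so an object that is already being destroyed can never be
    // "resurrected" through a weak reference.
    bool try_ref() const
    {
        unsigned expected = ref_count.load(std::memory_order_relaxed);
        for (;;) {
            if (expected == 0)
                return false;
            if (ref_count.compare_exchange_strong(expected, expected + 1, std::memory_order_acquire))
                return true;
        }
    }

    // Like RefCounted::unref(): report whether this dropped the last reference.
    bool unref() const
    {
        return ref_count.fetch_sub(1, std::memory_order_acq_rel) == 1;
    }
};

// Like WeakLink: the upper bits of m_consumers count in-flight strong_ref()
// calls, the LSB flags that revocation is in progress.
struct Link {
    std::atomic<Object*> m_ptr;
    mutable std::atomic<unsigned> m_consumers { 0 };

    explicit Link(Object& object)
        : m_ptr(&object)
    {
    }

    Object* strong_ref() const
    {
        Object* result = nullptr;
        // Announce ourselves as a consumer; bail out if revocation has started.
        if (!(m_consumers.fetch_add(1u << 1, std::memory_order_acquire) & 1u)) {
            auto* ptr = m_ptr.load(std::memory_order_acquire);
            if (ptr && ptr->try_ref())
                result = ptr; // the caller now owns one strong reference
        }
        m_consumers.fetch_sub(1u << 1, std::memory_order_release);
        return result;
    }

    void revoke()
    {
        auto consumers = m_consumers.fetch_or(1u, std::memory_order_relaxed);
        // Wait for concurrent strong_ref() calls to drain; their try_ref()
        // attempts fail anyway once the count has reached zero.
        while (consumers > 0) {
            std::this_thread::yield();
            consumers = m_consumers.load(std::memory_order_acquire) & ~1u;
        }
        m_ptr.store(nullptr, std::memory_order_release);
    }
};

int main()
{
    Object object;
    Link link(object);

    // While at least one strong reference exists, upgrading the weak link works.
    if (auto* ptr = link.strong_ref()) {
        assert(ptr == &object);
        ptr->unref();
    }

    // Destruction path, in the order the patch has to cope with: the last
    // strong reference is dropped first, the weak link is revoked afterwards.
    if (object.unref())
        link.revoke();

    // In the window between those two steps, try_ref() already refuses to
    // hand out references, so strong_ref() cannot observe a dead object.
    assert(link.strong_ref() == nullptr);
    std::printf("upgrade after destruction started: denied\n");
}

The property to notice is that strong_ref() never blocks: it only announces itself through the consumer counter and relies on try_ref() to reject objects whose count has already reached zero, while revoke() is the only side that spins, and only for as long as such doomed upgrade attempts are still in flight.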