Kernel/Locking: Add lock rank tracking to Spinlock/RecursiveSpinlock

This commit is contained in:
Brian Gianforcaro 2021-09-07 02:58:17 -07:00 committed by Andreas Kling
parent 066b0590ec
commit f6b1517426

View file

@@ -9,7 +9,7 @@
#include <AK/Atomic.h> #include <AK/Atomic.h>
#include <AK/Types.h> #include <AK/Types.h>
#include <Kernel/Arch/x86/Processor.h> #include <Kernel/Arch/x86/Processor.h>
#include <Kernel/Forward.h> #include <Kernel/Locking/LockRank.h>
namespace Kernel { namespace Kernel {
@@ -18,7 +18,10 @@ class Spinlock {
AK_MAKE_NONMOVABLE(Spinlock); AK_MAKE_NONMOVABLE(Spinlock);
public: public:
Spinlock() = default; Spinlock(LockRank rank = LockRank::None)
: m_rank(rank)
{
}
ALWAYS_INLINE u32 lock() ALWAYS_INLINE u32 lock()
{ {
@@ -28,17 +31,20 @@ public:
while (m_lock.exchange(1, AK::memory_order_acquire) != 0) { while (m_lock.exchange(1, AK::memory_order_acquire) != 0) {
Processor::wait_check(); Processor::wait_check();
} }
track_lock_acquire(m_rank);
return prev_flags; return prev_flags;
} }
ALWAYS_INLINE void unlock(u32 prev_flags) ALWAYS_INLINE void unlock(u32 prev_flags)
{ {
VERIFY(is_locked()); VERIFY(is_locked());
track_lock_release(m_rank);
m_lock.store(0, AK::memory_order_release); m_lock.store(0, AK::memory_order_release);
if (prev_flags & 0x200) if (prev_flags & 0x200)
sti(); sti();
else else
cli(); cli();
Processor::leave_critical(); Processor::leave_critical();
} }
@@ -54,6 +60,7 @@ public:
private: private:
Atomic<u8> m_lock { 0 }; Atomic<u8> m_lock { 0 };
const LockRank m_rank;
}; };
class RecursiveSpinlock { class RecursiveSpinlock {
@@ -61,7 +68,10 @@ class RecursiveSpinlock {
AK_MAKE_NONMOVABLE(RecursiveSpinlock); AK_MAKE_NONMOVABLE(RecursiveSpinlock);
public: public:
RecursiveSpinlock() = default; RecursiveSpinlock(LockRank rank = LockRank::None)
: m_rank(rank)
{
}
ALWAYS_INLINE u32 lock() ALWAYS_INLINE u32 lock()
{ {
@@ -77,6 +87,8 @@ public:
Processor::wait_check(); Processor::wait_check();
expected = 0; expected = 0;
} }
if (m_recursions == 0)
track_lock_acquire(m_rank);
m_recursions++; m_recursions++;
return prev_flags; return prev_flags;
} }
@@ -85,12 +97,15 @@ public:
{ {
VERIFY(m_recursions > 0); VERIFY(m_recursions > 0);
VERIFY(m_lock.load(AK::memory_order_relaxed) == FlatPtr(&Processor::current())); VERIFY(m_lock.load(AK::memory_order_relaxed) == FlatPtr(&Processor::current()));
if (--m_recursions == 0) if (--m_recursions == 0) {
track_lock_release(m_rank);
m_lock.store(0, AK::memory_order_release); m_lock.store(0, AK::memory_order_release);
}
if (prev_flags & 0x200) if (prev_flags & 0x200)
sti(); sti();
else else
cli(); cli();
Processor::leave_critical(); Processor::leave_critical();
} }
@@ -112,6 +127,7 @@ public:
private: private:
Atomic<FlatPtr> m_lock { 0 }; Atomic<FlatPtr> m_lock { 0 };
u32 m_recursions { 0 }; u32 m_recursions { 0 };
const LockRank m_rank;
}; };
template<typename LockType> template<typename LockType>