Kernel: Port VMObject to ListedRefCounted

The VMObject class now manages its own instance list (it was previously
a member of MemoryManager). Removal from the list is done safely on the
last unref(), closing a race window in the previous implementation.

Note that VMObject::all_instances() now has its own lock instead of
using the global MM lock.
This commit is contained in:
Andreas Kling 2021-08-16 22:54:25 +02:00
parent 3a2d888913
commit 7979b5a8bb
4 changed files with 26 additions and 28 deletions

View file

@@ -1053,18 +1053,6 @@ bool MemoryManager::validate_user_stack(AddressSpace& space, VirtualAddress vadd
return validate_user_stack_no_lock(space, vaddr);
}
// Add a VMObject to the MM-owned global instance list.
// Mutation of the intrusive list is serialized by the global MM spinlock.
void MemoryManager::register_vmobject(VMObject& vmobject)
{
    ScopedSpinLock mm_locker(s_mm_lock);
    m_vmobjects.append(vmobject);
}
// Remove a VMObject from the MM-owned global instance list.
// Same locking discipline as register_vmobject(): hold s_mm_lock.
void MemoryManager::unregister_vmobject(VMObject& vmobject)
{
    ScopedSpinLock mm_locker(s_mm_lock);
    m_vmobjects.remove(vmobject);
}
void MemoryManager::register_region(Region& region)
{
ScopedSpinLock lock(s_mm_lock);

View file

@@ -204,18 +204,22 @@ public:
// Invoke `callback` on every live VMObject; iteration stops early when the
// callback returns IterationDecision::Break.
// NOTE: The extraction flattened the diff here so both the old s_mm_lock
// loop and its replacement were present, which would have visited every
// VMObject twice under two different locks. Only the post-change version
// (iterating VMObject's own locked instance list) is kept.
template<IteratorFunction<VMObject&> Callback>
static void for_each_vmobject(Callback callback)
{
    VMObject::all_instances().with([&](auto& list) {
        for (auto& vmobject : list) {
            if (callback(vmobject) == IterationDecision::Break)
                break;
        }
    });
}
// Invoke `callback` on every live VMObject (no early-exit variant).
// NOTE: The extraction flattened the diff here so both the old unlocked
// MM.m_vmobjects loop and its replacement were present, which would have
// called the callback twice per object. Only the post-change version
// (iterating VMObject's own locked instance list) is kept.
template<VoidFunction<VMObject&> Callback>
static void for_each_vmobject(Callback callback)
{
    VMObject::all_instances().with([&](auto& list) {
        for (auto& vmobject : list) {
            callback(vmobject);
        }
    });
}
static Region* find_user_region_from_vaddr(AddressSpace&, VirtualAddress);
@@ -242,8 +246,6 @@ private:
void initialize_physical_pages();
void register_reserved_ranges();
void register_vmobject(VMObject&);
void unregister_vmobject(VMObject&);
void register_region(Region&);
void unregister_region(Region&);
@@ -289,8 +291,6 @@ private:
Vector<UsedMemoryRange> m_used_memory_ranges;
Vector<PhysicalMemoryRange> m_physical_memory_ranges;
Vector<ContiguousReservedMemoryRange> m_reserved_memory_ranges;
VMObject::List m_vmobjects;
};
inline bool is_user_address(VirtualAddress vaddr)

View file

@@ -4,21 +4,29 @@
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/Singleton.h>
#include <Kernel/Memory/MemoryManager.h>
#include <Kernel/Memory/VMObject.h>
namespace Kernel::Memory {
static Singleton<SpinLockProtectedValue<VMObject::AllInstancesList>> s_all_instances;
// Accessor for the global list of all live VMObject instances.
// The list is guarded by its own spinlock (SpinLockProtectedValue) rather
// than the global MM lock — see the commit message above.
SpinLockProtectedValue<VMObject::AllInstancesList>& VMObject::all_instances()
{
// NOTE(review): returning the Singleton directly presumably relies on an
// implicit conversion to a reference of the wrapped type — confirm
// against AK/Singleton.h.
return s_all_instances;
}
// Copy-construct a VMObject, sharing the source's physical page vector,
// and register the new object on the global instance list.
// NOTE: The extraction flattened the diff here so the removed
// `MM.register_vmobject(*this);` line survived alongside its replacement,
// which would register the object twice (and the MM method no longer
// exists after this commit). Only the replacement is kept.
VMObject::VMObject(VMObject const& other)
    : m_physical_pages(other.m_physical_pages)
{
    all_instances().with([&](auto& list) { list.append(*this); });
}
// Construct a VMObject spanning `size` bytes, rounded up to whole pages,
// and register it on the global instance list.
// NOTE: The extraction flattened the diff here so the removed
// `MM.register_vmobject(*this);` line survived alongside its replacement,
// which would register the object twice (and the MM method no longer
// exists after this commit). Only the replacement is kept.
VMObject::VMObject(size_t size)
    : m_physical_pages(ceil_div(size, static_cast<size_t>(PAGE_SIZE)))
{
    all_instances().with([&](auto& list) { list.append(*this); });
}
VMObject::~VMObject()
@@ -30,7 +38,6 @@ VMObject::~VMObject()
m_on_deleted.clear();
}
MM.unregister_vmobject(*this);
VERIFY(m_regions.is_empty());
}

View file

@@ -14,6 +14,7 @@
#include <AK/Vector.h>
#include <AK/Weakable.h>
#include <Kernel/Forward.h>
#include <Kernel/Library/ListedRefCounted.h>
#include <Kernel/Locking/Mutex.h>
#include <Kernel/Memory/Region.h>
@@ -25,7 +26,8 @@ public:
virtual void vmobject_deleted(VMObject&) = 0;
};
class VMObject : public RefCounted<VMObject>
class VMObject
: public ListedRefCounted<VMObject>
, public Weakable<VMObject> {
friend class MemoryManager;
friend class Region;
@@ -95,7 +97,8 @@ private:
Region::ListInVMObject m_regions;
public:
using List = IntrusiveList<VMObject, RawPtr<VMObject>, &VMObject::m_list_node>;
using AllInstancesList = IntrusiveList<VMObject, RawPtr<VMObject>, &VMObject::m_list_node>;
static SpinLockProtectedValue<VMObject::AllInstancesList>& all_instances();
};
template<typename Callback>