Implement MCS Combining lock #666

Merged
merged 4 commits on Jun 28, 2024
Changes from 1 commit
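This view shows the first commit of the PR, which mechanically converts call sites that used the RAII FlagLock guard into a closure-passing helper, with(lock, lambda). That shape is the prerequisite for the lock named in the title: once a critical section is a callable object, a waiting thread can hand it to whichever thread currently holds the lock, and that thread executes it on the waiter's behalf. The MCS combining lock itself is not part of this commit (with() still just takes a FlagLock, so behaviour is unchanged); presumably it lands in a later commit of the PR. As orientation only, here is a minimal, hypothetical sketch of an MCS-style combining lock. The names (CombiningLock, Node, with_lock) are invented for illustration, and a production lock would bound how much queued work one holder runs and would park rather than spin.

#include <atomic>
#include <functional>
#include <utility>

struct CombiningLock
{
  // One queue node per waiting thread, allocated on the waiter's stack.
  struct Node
  {
    std::atomic<Node*> next{nullptr};
    std::atomic<bool> done{false};
    std::function<void()> work;
  };

  std::atomic<Node*> tail{nullptr};

  template<typename F>
  void with_lock(F&& f)
  {
    Node self;
    self.work = std::forward<F>(f);

    // MCS-style enqueue: swing the tail to our node.
    Node* prev = tail.exchange(&self, std::memory_order_acq_rel);
    if (prev != nullptr)
    {
      // The lock is held: publish our work and wait until the holder has
      // executed it on our behalf.
      prev->next.store(&self, std::memory_order_release);
      while (!self.done.load(std::memory_order_acquire))
      {
        // spin (a real lock would yield or park here)
      }
      return;
    }

    // We acquired the lock: run our own critical section, then drain the
    // queue, executing waiters' critical sections for them.
    self.work();
    Node* current = &self;
    while (true)
    {
      Node* next = current->next.load(std::memory_order_acquire);
      if (next == nullptr)
      {
        // Queue looks empty: try to close it by resetting the tail.
        Node* expected = current;
        if (tail.compare_exchange_strong(
              expected, nullptr, std::memory_order_acq_rel))
        {
          current->done.store(true, std::memory_order_release);
          return;
        }
        // An enqueue is in flight; wait for its next pointer to appear.
        while ((next = current->next.load(std::memory_order_acquire)) == nullptr)
        {
        }
      }
      // We have already run current's work; release its owner, then run the
      // next waiter's work.
      current->done.store(true, std::memory_order_release);
      next->work();
      current = next;
    }
  }
};

A caller would write lock.with_lock([&]() { shared_state++; }); depending on timing, the increment runs either on the calling thread or on the thread that currently holds the lock.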
42 changes: 23 additions & 19 deletions src/snmalloc/backend/globalconfig.h
@@ -96,33 +96,37 @@ namespace snmalloc
     // of allocators.
     SNMALLOC_SLOW_PATH static void ensure_init_slow()
     {
-      FlagLock lock{initialisation_lock};
+      if (initialised)
+        return;
+
+      with(initialisation_lock, [&]() {
 #ifdef SNMALLOC_TRACING
-      message<1024>("Run init_impl");
+        message<1024>("Run init_impl");
 #endif

-      if (initialised)
-        return;
+        if (initialised)
+          return;

-      LocalEntropy entropy;
-      entropy.init<Pal>();
-      // Initialise key for remote deallocation lists
-      RemoteAllocator::key_global = FreeListKey(entropy.get_free_list_key());
+        LocalEntropy entropy;
+        entropy.init<Pal>();
+        // Initialise key for remote deallocation lists
+        RemoteAllocator::key_global = FreeListKey(entropy.get_free_list_key());

-      // Need to randomise pagemap location. If requested and not a
-      // StrictProvenance architecture, randomize its table's location within a
-      // significantly larger address space allocation.
-      static constexpr bool pagemap_randomize =
-        mitigations(random_pagemap) && !aal_supports<StrictProvenance>;
+        // Need to randomise pagemap location. If requested and not a
+        // StrictProvenance architecture, randomize its table's location within
+        // a significantly larger address space allocation.
+        static constexpr bool pagemap_randomize =
+          mitigations(random_pagemap) && !aal_supports<StrictProvenance>;

-      Pagemap::concretePagemap.template init<pagemap_randomize>();
+        Pagemap::concretePagemap.template init<pagemap_randomize>();

-      if constexpr (aal_supports<StrictProvenance>)
-      {
-        Authmap::init();
-      }
+        if constexpr (aal_supports<StrictProvenance>)
+        {
+          Authmap::init();
+        }

-      initialised.store(true, std::memory_order_release);
+        initialised.store(true, std::memory_order_release);
+      });
     }

   public:
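The ensure_init_slow() change keeps a double-checked pattern: a cheap initialised check before taking the lock, and a second check inside the closure, because several threads can pass the first check before any one of them acquires the lock. A standalone sketch of the same idea, using std::mutex rather than snmalloc's own lock types (function and variable names here are illustrative):

#include <atomic>
#include <mutex>

std::atomic<bool> initialised{false};
std::mutex init_lock;

void ensure_init(void (*do_init)())
{
  // Fast path: initialisation already published by another thread.
  if (initialised.load(std::memory_order_acquire))
    return;

  std::lock_guard<std::mutex> guard(init_lock);

  // Re-check under the lock: another thread may have initialised while we
  // were waiting to acquire it.
  if (initialised.load(std::memory_order_relaxed))
    return;

  do_init();
  initialised.store(true, std::memory_order_release);
}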
12 changes: 8 additions & 4 deletions src/snmalloc/backend_helpers/lockrange.h
@@ -35,14 +35,18 @@ namespace snmalloc

       CapPtr<void, ChunkBounds> alloc_range(size_t size)
       {
-        FlagLock lock(spin_lock);
-        return parent.alloc_range(size);
+        CapPtr<void, ChunkBounds> result;
+        with(spin_lock, [&]() {
+          {
+            result = parent.alloc_range(size);
+          }
+        });
+        return result;
       }

       void dealloc_range(CapPtr<void, ChunkBounds> base, size_t size)
       {
-        FlagLock lock(spin_lock);
-        parent.dealloc_range(base, size);
+        with(spin_lock, [&]() { parent.dealloc_range(base, size); });
       }
     };
   };
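In alloc_range above, the result has to be captured by reference because with() returns void and a return inside the lambda only leaves the lambda, not alloc_range. A value-returning variant is conceivable; the sketch below is hypothetical and not part of this PR, reusing the FlagWord and FlagLock types from flaglock.h. Under a combining lock, such a helper would also have to marshal the result back to the enqueuing thread, which may be why the PR keeps the void-returning form.

// Hypothetical helper: run f under the lock and forward its result.
// Assumes snmalloc's flaglock.h (FlagWord, FlagLock) is available.
template<typename F>
inline auto with_result(FlagWord& lock, F&& f) -> decltype(f())
{
  FlagLock l(lock); // released when l goes out of scope
  return f();
}

// Hypothetical call site:
//   return with_result(spin_lock, [&]() { return parent.alloc_range(size); });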
7 changes: 7 additions & 0 deletions src/snmalloc/ds/flaglock.h
@@ -133,4 +133,11 @@ namespace snmalloc
       lock.flag.store(false, std::memory_order_release);
     }
   };
+
+  template<typename F>
+  inline void with(FlagWord& lock, F&& f)
+  {
+    FlagLock l(lock);
+    f();
+  }
 } // namespace snmalloc

Contributor review comment on the new with() helper: "Not when? ;)"
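A usage sketch of the new helper, showing the conversion the rest of this commit applies. flag, shared_counter and the bump functions are made-up names; FlagWord, FlagLock and with are the ones defined in flaglock.h, so this assumes the snmalloc headers are on the include path. In this commit the two forms behave identically, since with() still takes a FlagLock around the closure.

#include "snmalloc/ds/flaglock.h" // assumed include path, relative to src/
#include <cstddef>

snmalloc::FlagWord flag;
size_t shared_counter = 0;

// RAII form used before this commit.
void bump_old()
{
  snmalloc::FlagLock lock(flag);
  shared_counter++;
}

// Closure-passing form introduced by this commit.
void bump_new()
{
  snmalloc::with(flag, [&]() { shared_counter++; });
}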
17 changes: 9 additions & 8 deletions src/snmalloc/ds/singleton.h
@@ -35,14 +35,15 @@ namespace snmalloc

       if (SNMALLOC_UNLIKELY(!initialised.load(std::memory_order_acquire)))
       {
-        FlagLock lock(flag);
-        if (!initialised)
-        {
-          init(&obj);
-          initialised.store(true, std::memory_order_release);
-          if (first != nullptr)
-            *first = true;
-        }
+        with(flag, [&]() {
+          if (!initialised)
+          {
+            init(&obj);
+            initialised.store(true, std::memory_order_release);
+            if (first != nullptr)
+              *first = true;
+          }
+        });
       }
       return obj;
     }
78 changes: 43 additions & 35 deletions src/snmalloc/mem/pool.h
@@ -100,8 +100,9 @@ namespace snmalloc
     static T* acquire()
     {
       PoolState<T>& pool = get_state();
-      {
-        FlagLock f(pool.lock);
+
+      T* result{nullptr};
+      with(pool.lock, [&]() {
         if (pool.front != nullptr)
         {
           auto p = pool.front;
@@ -112,17 +113,21 @@
           }
           pool.front = next;
           p->set_in_use();
-          return p.unsafe_ptr();
+          result = p.unsafe_ptr();
         }
-      }
+      });

+      if (result != nullptr)
+        return result;
+
       auto p = ConstructT::make();

-      FlagLock f(pool.lock);
-      p->list_next = pool.list;
-      pool.list = p;
+      with(pool.lock, [&]() {
+        p->list_next = pool.list;
+        pool.list = p;

-      p->set_in_use();
+        p->set_in_use();
+      });
       return p.unsafe_ptr();
     }

@@ -146,11 +151,13 @@
       // Returns a linked list of all objects in the stack, emptying the stack.
       if (p == nullptr)
       {
-        FlagLock f(pool.lock);
-        auto result = pool.front;
-        pool.front = nullptr;
-        pool.back = nullptr;
-        return result.unsafe_ptr();
+        T* result;
+        with(pool.lock, [&]() {
+          result = pool.front.unsafe_ptr();
+          pool.front = nullptr;
+          pool.back = nullptr;
+        });
+        return result;
       }

       return p->next.unsafe_ptr();
@@ -165,18 +172,18 @@
     {
       PoolState<T>& pool = get_state();
       last->next = nullptr;
-      FlagLock f(pool.lock);
-
-      if (pool.front == nullptr)
-      {
-        pool.front = capptr::Alloc<T>::unsafe_from(first);
-      }
-      else
-      {
-        pool.back->next = capptr::Alloc<T>::unsafe_from(first);
-      }
+      with(pool.lock, [&]() {
+        if (pool.front == nullptr)
+        {
+          pool.front = capptr::Alloc<T>::unsafe_from(first);
+        }
+        else
+        {
+          pool.back->next = capptr::Alloc<T>::unsafe_from(first);
+        }

-      pool.back = capptr::Alloc<T>::unsafe_from(last);
+        pool.back = capptr::Alloc<T>::unsafe_from(last);
+      });
     }

     /**
@@ -188,18 +195,19 @@
     {
       PoolState<T>& pool = get_state();
       last->next = nullptr;
-      FlagLock f(pool.lock);

-      if (pool.front == nullptr)
-      {
-        pool.back = capptr::Alloc<T>::unsafe_from(last);
-      }
-      else
-      {
-        last->next = pool.front;
-        pool.back->next = capptr::Alloc<T>::unsafe_from(first);
-      }
-      pool.front = capptr::Alloc<T>::unsafe_from(first);
+      with(pool.lock, [&]() {
+        if (pool.front == nullptr)
+        {
+          pool.back = capptr::Alloc<T>::unsafe_from(last);
+        }
+        else
+        {
+          last->next = pool.front;
+          pool.back->next = capptr::Alloc<T>::unsafe_from(first);
+        }
+        pool.front = capptr::Alloc<T>::unsafe_from(first);
+      });
     }

     static T* iterate(T* p = nullptr)
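The acquire() conversion at the top of this file preserves the original two-critical-section shape: one lock acquisition to try to pop a pooled object and, only if that fails, a second one to register a freshly constructed object, with the potentially expensive construction kept outside any lock. A stripped-down sketch of that shape using an ordinary mutex; every name here is illustrative, and the intrusive next/list_next fields mirror the fields Pool uses:

#include <mutex>

template<typename T, typename Make>
T* acquire_pooled(std::mutex& lock, T*& free_list, T*& all_list, Make make)
{
  T* result = nullptr;
  {
    std::lock_guard<std::mutex> g(lock);
    if (free_list != nullptr)
    {
      // Pop a previously released object from the free list.
      result = free_list;
      free_list = result->next;
    }
  }
  if (result != nullptr)
    return result;

  // Construct outside the lock; this may be slow or allocate memory.
  T* fresh = make();

  {
    std::lock_guard<std::mutex> g(lock);
    // Register the new object on the list of all objects ever created.
    fresh->list_next = all_list;
    all_list = fresh;
  }
  return fresh;
}

The same reasoning carries over once the lock underneath with() becomes a combining lock: the closures handed to with() stay small, and construction never happens while holding the lock.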