AK+Everywhere: Make FixedArray OOM-safe

FixedArray no longer exposes any infallible constructors. Instead, it
exposes fallible methods, so it can now be used in OOM-safe code.

This commit also converts the rest of the system to the new API.
VMObject, however, can't take full advantage of this yet: we would
have to endow VMObject with a fallible static construction method,
which would require a very fundamental change to VMObject's whole
inheritance hierarchy.
Authored by creator1creeper1 on 2022-01-08 20:06:03 +01:00, committed by Linus Groh
parent a7b70c62d7
commit 3c05261611
5 changed files with 62 additions and 34 deletions
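For illustration, here is a minimal sketch of how a call site migrates to the new API. The function and variable names below are invented for this example and do not appear in the commit; the caller is assumed to return ErrorOr<> so that TRY() can propagate the allocation failure.

#include <AK/FixedArray.h>
#include <AK/Try.h>

// Hypothetical caller, for illustration only.
// Before this commit, the constructor allocated and could not report failure:
//     FixedArray<int> values(count);
ErrorOr<void> fill_values(size_t count)
{
    // try_create() returns ErrorOr<FixedArray<int>>; ENOMEM propagates to the caller.
    auto values = TRY(FixedArray<int>::try_create(count));
    for (size_t i = 0; i < values.size(); ++i)
        values[i] = static_cast<int>(i);
    return {};
}

Call sites that can't return errors yet use must_create_but_fixme_should_propagate_errors(), which wraps try_create() in MUST() and asserts on failure, as the VMObject and LibAudio hunks below show.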


@@ -1,11 +1,13 @@
 /*
- * Copyright (c) 2018-2021, Andreas Kling <kling@serenityos.org>
+ * Copyright (c) 2018-2022, Andreas Kling <kling@serenityos.org>
+ * Copyright (c) 2022, the SerenityOS developers.
  *
  * SPDX-License-Identifier: BSD-2-Clause
  */

 #pragma once

+#include <AK/Error.h>
 #include <AK/Iterator.h>
 #include <AK/Span.h>
 #include <AK/kmalloc.h>
@@ -16,40 +18,60 @@ template<typename T>
 class FixedArray {
 public:
     FixedArray() = default;
-    explicit FixedArray(size_t size)
-        : m_size(size)
+
+    static ErrorOr<FixedArray<T>> try_create(size_t size)
     {
-        if (m_size != 0) {
-            m_elements = static_cast<T*>(kmalloc_array(m_size, sizeof(T)));
-            for (size_t i = 0; i < m_size; ++i)
-                new (&m_elements[i]) T();
-        }
+        if (size == 0)
+            return FixedArray<T>();
+        T* elements = static_cast<T*>(kmalloc_array(size, sizeof(T)));
+        if (!elements)
+            return Error::from_errno(ENOMEM);
+        for (size_t i = 0; i < size; ++i)
+            new (&elements[i]) T();
+        return FixedArray<T>(size, elements);
     }

+    static FixedArray<T> must_create_but_fixme_should_propagate_errors(size_t size)
+    {
+        return MUST(try_create(size));
+    }
+
+    ErrorOr<FixedArray<T>> try_clone() const
+    {
+        if (m_size == 0)
+            return FixedArray<T>();
+        T* elements = static_cast<T*>(kmalloc_array(m_size, sizeof(T)));
+        if (!elements)
+            return Error::from_errno(ENOMEM);
+        for (size_t i = 0; i < m_size; ++i)
+            new (&elements[i]) T(m_elements[i]);
+        return FixedArray<T>(m_size, elements);
+    }
+
+    FixedArray<T> must_clone_but_fixme_should_propagate_errors() const
+    {
+        return MUST(try_clone());
+    }
+
+    // NOTE: Nobody can ever use these functions, since it would be impossible to make them OOM-safe due to their signatures. We just explicitly delete them.
+    FixedArray(FixedArray<T> const&) = delete;
+    FixedArray<T>& operator=(FixedArray<T> const&) = delete;
+
+    FixedArray(FixedArray<T>&& other)
+        : m_size(other.m_size)
+        , m_elements(other.m_elements)
+    {
+        other.m_size = 0;
+        other.m_elements = nullptr;
+    }
+    // NOTE: Nobody uses this function, so we just explicitly delete it.
+    FixedArray<T>& operator=(FixedArray<T>&&) = delete;
+
     ~FixedArray()
     {
         clear();
     }

-    FixedArray(FixedArray const& other)
-        : m_size(other.m_size)
-    {
-        if (m_size != 0) {
-            m_elements = static_cast<T*>(kmalloc_array(m_size, sizeof(T)));
-            for (size_t i = 0; i < m_size; ++i)
-                new (&m_elements[i]) T(other[i]);
-        }
-    }
-
-    FixedArray& operator=(FixedArray const& other)
-    {
-        FixedArray array(other);
-        swap(array);
-        return *this;
-    }
-
-    FixedArray(FixedArray&&) = delete;
-    FixedArray& operator=(FixedArray&&) = delete;
-
     void clear()
     {
         if (!m_elements)
@@ -86,7 +108,7 @@ public:
         return false;
     }

-    void swap(FixedArray& other)
+    void swap(FixedArray<T>& other)
     {
         ::swap(m_elements, other.m_elements);
         ::swap(m_size, other.m_size);
@@ -105,6 +127,12 @@ public:
     Span<T> span() { return { data(), size() }; }

 private:
+    FixedArray(size_t size, T* elements)
+        : m_size(size)
+        , m_elements(elements)
+    {
+    }
+
     size_t m_size { 0 };
     T* m_elements { nullptr };
 };
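With copy construction and copy assignment deleted, duplicating a FixedArray is now an explicit, fallible operation, while ownership transfer goes through the (infallible) move constructor. A minimal sketch, again with invented names and an ErrorOr-returning caller:

#include <AK/FixedArray.h>
#include <AK/StdLibExtras.h>
#include <AK/Try.h>
#include <AK/Types.h>

ErrorOr<void> clone_and_move_example()
{
    auto original = TRY(FixedArray<u8>::try_create(16));

    // Copying is deleted, so a duplicate has to be requested explicitly and can fail.
    auto duplicate = TRY(original.try_clone());

    // Moving is still allowed and cannot fail; `original` is left empty (size 0) afterwards.
    FixedArray<u8> owner = move(original);

    (void)duplicate;
    (void)owner;
    return {};
}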


@@ -18,13 +18,13 @@ SpinlockProtected<VMObject::AllInstancesList>& VMObject::all_instances()
 }

 VMObject::VMObject(VMObject const& other)
-    : m_physical_pages(other.m_physical_pages)
+    : m_physical_pages(other.m_physical_pages.must_clone_but_fixme_should_propagate_errors())
 {
     all_instances().with([&](auto& list) { list.append(*this); });
 }

 VMObject::VMObject(size_t size)
-    : m_physical_pages(ceil_div(size, static_cast<size_t>(PAGE_SIZE)))
+    : m_physical_pages(FixedArray<RefPtr<PhysicalPage>>::must_create_but_fixme_should_propagate_errors(ceil_div(size, static_cast<size_t>(PAGE_SIZE))))
 {
     all_instances().with([&](auto& list) { list.append(*this); });
 }
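As the commit message notes, VMObject still has to use the must_*_but_fixme_should_propagate_errors() helpers because its constructors cannot report failure. For context, a rough sketch of the shape a fallible construction path would take, reusing helpers that appear in the hunks of this commit (adopt_nonnull_ref_or_enomem, ceil_div, PAGE_SIZE); SomeVMObject is a hypothetical stand-in and this is not part of the commit:

// Hypothetical sketch only. The idea: perform the fallible FixedArray
// allocation first, then hand the result to a constructor that cannot fail.
static ErrorOr<NonnullRefPtr<SomeVMObject>> try_create(size_t size)
{
    auto physical_pages = TRY(FixedArray<RefPtr<PhysicalPage>>::try_create(
        ceil_div(size, static_cast<size_t>(PAGE_SIZE))));
    return adopt_nonnull_ref_or_enomem(new (nothrow) SomeVMObject(move(physical_pages)));
}

Wiring such a factory through VMObject's whole inheritance hierarchy is the fundamental change the commit message says is being deferred here.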


@@ -16,7 +16,7 @@ TEST_CASE(construct)

 TEST_CASE(ints)
 {
-    FixedArray<int> ints(3);
+    FixedArray<int> ints = FixedArray<int>::must_create_but_fixme_should_propagate_errors(3);
     ints[0] = 0;
     ints[1] = 1;
     ints[2] = 2;


@@ -89,7 +89,7 @@ public:
     static NonnullRefPtr<Buffer> create_empty()
     {
         // If we can't allocate an empty buffer, things are in a very bad state.
-        return MUST(adopt_nonnull_ref_or_enomem(new (nothrow) Buffer(FixedArray<Sample> {})));
+        return MUST(adopt_nonnull_ref_or_enomem(new (nothrow) Buffer(FixedArray<Sample>())));
     }

     Sample const* samples() const { return (const Sample*)data(); }


@@ -197,7 +197,7 @@ LoaderSamples FlacLoaderPlugin::get_more_samples(size_t max_bytes_to_read_from_i
         return Buffer::create_empty();

     size_t samples_to_read = min(max_bytes_to_read_from_input, remaining_samples);
-    auto samples = FixedArray<Sample>(samples_to_read);
+    auto samples = FixedArray<Sample>::must_create_but_fixme_should_propagate_errors(samples_to_read);
     size_t sample_index = 0;

     if (m_unread_data.size() > 0) {