/*
 * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

// KBuffer: Statically sized kernel-only memory buffer.
//
// A KBuffer is a value-type convenience class that wraps a RefPtr<KBufferImpl>.
// The memory is allocated via the global kernel-only page allocator, rather than via
// kmalloc(), which is what ByteBuffer/Vector/etc will use.
//
// This makes KBuffer a little heavier to allocate, but much better for large and/or
// long-lived allocations, since they don't put all that weight and pressure on the
// severely limited kmalloc heap.
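//
// Usage sketch (illustrative only, not part of the original header; assumes a
// kernel context where the memory manager (MM) is up, with `source` and
// `source_size` as placeholders for the caller's data):
//
//     auto buffer = KBuffer::try_create_with_size(4096);
//     if (!buffer)
//         return;                       // allocation failed
//     memcpy(buffer->data(), source, source_size);
//     buffer->set_size(source_size);    // record how many bytes are actually used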

#include <AK/Assertions.h>
#include <AK/ByteBuffer.h>
#include <AK/LogStream.h>
#include <AK/Memory.h>
#include <AK/StringView.h>
#include <Kernel/VM/MemoryManager.h>
#include <Kernel/VM/Region.h>

namespace Kernel {

class KBufferImpl : public RefCounted<KBufferImpl> {
public:
    static RefPtr<KBufferImpl> try_create_with_size(size_t size, u8 access, const char* name = "KBuffer", AllocationStrategy strategy = AllocationStrategy::Reserve)
    {
        auto region = MM.allocate_kernel_region(page_round_up(size), name, access, strategy);
        if (!region)
            return nullptr;
        return adopt(*new KBufferImpl(region.release_nonnull(), size, strategy));
    }

    static RefPtr<KBufferImpl> try_create_with_bytes(ReadonlyBytes bytes, u8 access, const char* name = "KBuffer", AllocationStrategy strategy = AllocationStrategy::Reserve)
    {
        auto region = MM.allocate_kernel_region(page_round_up(bytes.size()), name, access, strategy);
        if (!region)
            return nullptr;
        memcpy(region->vaddr().as_ptr(), bytes.data(), bytes.size());
        return adopt(*new KBufferImpl(region.release_nonnull(), bytes.size(), strategy));
    }

    static RefPtr<KBufferImpl> create_with_size(size_t size, u8 access, const char* name, AllocationStrategy strategy = AllocationStrategy::Reserve)
    {
        return try_create_with_size(size, access, name, strategy);
    }

    static RefPtr<KBufferImpl> copy(const void* data, size_t size, u8 access, const char* name)
    {
        auto buffer = create_with_size(size, access, name, AllocationStrategy::AllocateNow);
        if (!buffer)
            return {};
        memcpy(buffer->data(), data, size);
        return buffer;
    }

    // Replaces the backing region with a fresh one of at least new_capacity bytes,
    // copying over the bytes currently in use. Returns false if allocation fails.
    [[nodiscard]] bool expand(size_t new_capacity)
    {
        auto new_region = MM.allocate_kernel_region(page_round_up(new_capacity), m_region->name(), m_region->access(), m_allocation_strategy);
        if (!new_region)
            return false;
        if (m_region && m_size > 0)
            memcpy(new_region->vaddr().as_ptr(), data(), min(m_region->size(), m_size));
        m_region = new_region.release_nonnull();
        return true;
    }

    [[nodiscard]] u8* data() { return m_region->vaddr().as_ptr(); }
    [[nodiscard]] const u8* data() const { return m_region->vaddr().as_ptr(); }
    [[nodiscard]] size_t size() const { return m_size; }
    [[nodiscard]] size_t capacity() const { return m_region->size(); }

    // Records how many bytes of the capacity are in use; must not exceed capacity().
    void set_size(size_t size)
    {
        VERIFY(size <= capacity());
        m_size = size;
    }

    [[nodiscard]] const Region& region() const { return *m_region; }
    [[nodiscard]] Region& region() { return *m_region; }

private:
    explicit KBufferImpl(NonnullOwnPtr<Region>&& region, size_t size, AllocationStrategy strategy)
        : m_size(size)
        , m_allocation_strategy(strategy)
        , m_region(move(region))
    {
    }

    size_t m_size { 0 };
    AllocationStrategy m_allocation_strategy { AllocationStrategy::Reserve };
    NonnullOwnPtr<Region> m_region;
};

class [[nodiscard]] KBuffer {
public:
    explicit KBuffer(RefPtr<KBufferImpl>&& impl)
        : m_impl(move(impl))
    {
    }

    [[nodiscard]] static OwnPtr<KBuffer> try_create_with_size(size_t size, u8 access = Region::Access::Read | Region::Access::Write, const char* name = "KBuffer", AllocationStrategy strategy = AllocationStrategy::Reserve)
    {
        auto impl = KBufferImpl::try_create_with_size(size, access, name, strategy);
        if (!impl)
            return {};
        return adopt_own(*new KBuffer(impl.release_nonnull()));
    }

    [[nodiscard]] static OwnPtr<KBuffer> try_create_with_bytes(ReadonlyBytes bytes, u8 access = Region::Access::Read | Region::Access::Write, const char* name = "KBuffer", AllocationStrategy strategy = AllocationStrategy::Reserve)
    {
        auto impl = KBufferImpl::try_create_with_bytes(bytes, access, name, strategy);
        if (!impl)
            return {};
        return adopt_own(*new KBuffer(impl.release_nonnull()));
    }

    [[nodiscard]] static KBuffer create_with_size(size_t size, u8 access = Region::Access::Read | Region::Access::Write, const char* name = "KBuffer", AllocationStrategy strategy = AllocationStrategy::Reserve)
    {
        return KBuffer(KBufferImpl::create_with_size(size, access, name, strategy));
    }

    [[nodiscard]] static KBuffer copy(const void* data, size_t size, u8 access = Region::Access::Read | Region::Access::Write, const char* name = "KBuffer")
    {
        return KBuffer(KBufferImpl::copy(data, size, access, name));
    }

    [[nodiscard]] bool is_null() const { return !m_impl; }

    [[nodiscard]] u8* data() { return m_impl ? m_impl->data() : nullptr; }
    [[nodiscard]] const u8* data() const { return m_impl ? m_impl->data() : nullptr; }
    [[nodiscard]] size_t size() const { return m_impl ? m_impl->size() : 0; }
    [[nodiscard]] size_t capacity() const { return m_impl ? m_impl->capacity() : 0; }

    [[nodiscard]] void* end_pointer() { return data() + size(); }
    [[nodiscard]] const void* end_pointer() const { return data() + size(); }

    void set_size(size_t size) { m_impl->set_size(size); }

    [[nodiscard]] const KBufferImpl& impl() const { return *m_impl; }
    [[nodiscard]] RefPtr<KBufferImpl> take_impl() { return move(m_impl); }

    KBuffer(const ByteBuffer& buffer, u8 access = Region::Access::Read | Region::Access::Write, const char* name = "KBuffer")
        : m_impl(KBufferImpl::copy(buffer.data(), buffer.size(), access, name))
    {
    }

private:
    RefPtr<KBufferImpl> m_impl;
};

inline const LogStream& operator<<(const LogStream& stream, const KBuffer& value)
{
    return stream << StringView(value.data(), value.size());
}
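
// A minimal usage sketch (hypothetical helper, not part of the original API):
// copies the given bytes into a freshly allocated, page-backed KBuffer and logs
// its contents through the LogStream overload above. Assumes the kernel memory
// manager (MM) has already been initialized.
inline OwnPtr<KBuffer> try_duplicate_into_kbuffer_example(ReadonlyBytes source)
{
    auto buffer = KBuffer::try_create_with_bytes(source);
    if (buffer)
        dbg() << "KBuffer example: " << *buffer;
    return buffer;
}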

}