/*
 * Copyright (c) 2018-2021, Andreas Kling <andreas@ladybird.org>
 * Copyright (c) 2021, Gunnar Beutner <gbeutner@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/Assertions.h>
#include <AK/Badge.h>
#include <AK/Error.h>
#include <AK/Span.h>
#include <AK/Types.h>
#include <AK/kmalloc.h>

namespace AK {
namespace Detail {

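// A dynamically sized byte buffer with a small-buffer optimization: up to
// inline_capacity bytes are stored inside the object itself, and only larger
// buffers spill into a separate heap allocation.
//
// A minimal usage sketch (errors propagate via ErrorOr and TRY, as elsewhere
// in AK):
//
//     auto buffer = TRY(ByteBuffer::create_uninitialized(128));
//     buffer.zero_fill();
//     TRY(buffer.try_append(0x42));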
template<size_t inline_capacity>
class ByteBuffer {
public:
    ByteBuffer() = default;

    ~ByteBuffer()
    {
        clear();
    }

    ByteBuffer(ByteBuffer const& other)
    {
        MUST(try_resize(other.size()));
        VERIFY(m_size == other.size());
        __builtin_memcpy(data(), other.data(), other.size());
    }

    ByteBuffer(ByteBuffer&& other)
    {
        move_from(move(other));
    }

    ByteBuffer& operator=(ByteBuffer&& other)
    {
        if (this != &other) {
            if (!m_inline)
                kfree_sized(m_outline_buffer, m_outline_capacity);
            move_from(move(other));
        }
        return *this;
    }

    ByteBuffer& operator=(ByteBuffer const& other)
    {
        if (this != &other) {
            if (m_size > other.size()) {
                trim(other.size(), true);
            } else {
                MUST(try_resize(other.size()));
            }
            __builtin_memcpy(data(), other.data(), other.size());
        }
        return *this;
    }

    [[nodiscard]] static ErrorOr<ByteBuffer> create_uninitialized(size_t size)
    {
        auto buffer = ByteBuffer();
        TRY(buffer.try_resize(size));
        return { move(buffer) };
    }

    [[nodiscard]] static ErrorOr<ByteBuffer> create_zeroed(size_t size)
    {
        auto buffer = TRY(create_uninitialized(size));

        buffer.zero_fill();
        VERIFY(size == 0 || (buffer[0] == 0 && buffer[size - 1] == 0));
        return { move(buffer) };
    }

    [[nodiscard]] static ErrorOr<ByteBuffer> copy(void const* data, size_t size)
    {
        auto buffer = TRY(create_uninitialized(size));
        if (buffer.m_inline && size > inline_capacity)
            VERIFY_NOT_REACHED();
        if (size != 0)
            __builtin_memcpy(buffer.data(), data, size);
        return { move(buffer) };
    }

    [[nodiscard]] static ErrorOr<ByteBuffer> copy(ReadonlyBytes bytes)
    {
        return copy(bytes.data(), bytes.size());
    }
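
    // Builds a new buffer containing the byte-wise XOR of two equal-length
    // inputs; fails with EINVAL when the lengths differ.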
    [[nodiscard]] static ErrorOr<ByteBuffer> xor_buffers(ReadonlyBytes first, ReadonlyBytes second)
    {
        if (first.size() != second.size())
            return Error::from_errno(EINVAL);

        auto buffer = TRY(create_uninitialized(first.size()));
        auto buffer_data = buffer.data();
        auto first_data = first.data();
        auto second_data = second.data();
        for (size_t i = 0; i < first.size(); ++i)
            buffer_data[i] = first_data[i] ^ second_data[i];

        return { move(buffer) };
    }

    template<size_t other_inline_capacity>
    bool operator==(ByteBuffer<other_inline_capacity> const& other) const
    {
        if (size() != other.size())
            return false;

        // So they both have data, and the same length.
        return !__builtin_memcmp(data(), other.data(), size());
    }

    [[nodiscard]] u8& operator[](size_t i)
    {
        VERIFY(i < m_size);
        return data()[i];
    }

    [[nodiscard]] u8 const& operator[](size_t i) const
    {
        VERIFY(i < m_size);
        return data()[i];
    }

    [[nodiscard]] bool is_empty() const { return m_size == 0; }
    [[nodiscard]] size_t size() const { return m_size; }

#ifdef AK_COMPILER_GCC
#    pragma GCC diagnostic push
    // Workaround for https://gcc.gnu.org/bugzilla/show_bug.cgi?id=109727
#    pragma GCC diagnostic ignored "-Warray-bounds"
#endif
    [[nodiscard]] u8* data()
    {
        return m_inline ? m_inline_buffer : m_outline_buffer;
    }
    [[nodiscard]] u8 const* data() const { return m_inline ? m_inline_buffer : m_outline_buffer; }
#ifdef AK_COMPILER_GCC
#    pragma GCC diagnostic pop
#endif

    [[nodiscard]] Bytes bytes()
    {
        return { data(), size() };
    }
    [[nodiscard]] ReadonlyBytes bytes() const { return { data(), size() }; }

    [[nodiscard]] AK::Bytes span() { return { data(), size() }; }
    [[nodiscard]] AK::ReadonlyBytes span() const { return { data(), size() }; }

    [[nodiscard]] u8* offset_pointer(size_t offset) { return data() + offset; }
    [[nodiscard]] u8 const* offset_pointer(size_t offset) const { return data() + offset; }

    [[nodiscard]] void* end_pointer() { return data() + m_size; }
    [[nodiscard]] void const* end_pointer() const { return data() + m_size; }

    [[nodiscard]] ErrorOr<ByteBuffer> slice(size_t offset, size_t size) const
    {
        // I cannot hand you a slice I don't have
        VERIFY(offset + size <= this->size());

        return copy(offset_pointer(offset), size);
    }
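
    // Releases any heap allocation and resets to an empty, inline state.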
    void clear()
    {
        if (!m_inline) {
            kfree_sized(m_outline_buffer, m_outline_capacity);
            m_inline = true;
        }
        m_size = 0;
    }
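
    // Controls whether growth operations zero-initialize the newly added bytes.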
    enum class ZeroFillNewElements {
        No,
        Yes,
    };

    ALWAYS_INLINE void resize(size_t new_size, ZeroFillNewElements zero_fill_new_elements = ZeroFillNewElements::No)
    {
        MUST(try_resize(new_size, zero_fill_new_elements));
    }
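
    // Shrinks the buffer to `size` bytes, moving the contents back into the
    // inline buffer when they fit; pass may_discard_existing_data = true if
    // the caller is about to overwrite the contents anyway.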
    void trim(size_t size, bool may_discard_existing_data)
    {
        VERIFY(size <= m_size);
        if (!m_inline && size <= inline_capacity)
            shrink_into_inline_buffer(size, may_discard_existing_data);
        m_size = size;
    }

    ALWAYS_INLINE void ensure_capacity(size_t new_capacity)
    {
        MUST(try_ensure_capacity(new_capacity));
    }
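
    // Sets the logical size directly; new_size must not exceed capacity().
    // Note: the zero-fill path assumes the buffer is growing (new_size >= m_size).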
    void set_size(size_t new_size, ZeroFillNewElements zero_fill_new_elements = ZeroFillNewElements::No)
    {
        VERIFY(new_size <= capacity());

        if (zero_fill_new_elements == ZeroFillNewElements::Yes) {
            __builtin_memset(data() + m_size, 0, new_size - m_size);
        }

        m_size = new_size;
    }

    ErrorOr<void> try_resize(size_t new_size, ZeroFillNewElements zero_fill_new_elements = ZeroFillNewElements::No)
    {
        if (new_size <= m_size) {
            trim(new_size, false);
            return {};
        }
        TRY(try_ensure_capacity(new_size));

        set_size(new_size, zero_fill_new_elements);

        return {};
    }

    ErrorOr<void> try_ensure_capacity(size_t new_capacity)
    {
        if (new_capacity <= capacity())
            return {};
        return try_ensure_capacity_slowpath(new_capacity);
    }

    /// Return a span of bytes past the end of this ByteBuffer for writing.
    /// Ensures that the required space is available.
    ErrorOr<Bytes> get_bytes_for_writing(size_t length)
    {
        auto const old_size = size();
        TRY(try_resize(old_size + length));
        return Bytes { data() + old_size, length };
    }

    /// Like get_bytes_for_writing, but crashes if allocation fails.
    Bytes must_get_bytes_for_writing(size_t length)
    {
        return MUST(get_bytes_for_writing(length));
    }

    void append(u8 byte)
    {
        MUST(try_append(byte));
    }

    void append(ReadonlyBytes bytes)
    {
        MUST(try_append(bytes));
    }

    void append(void const* data, size_t data_size) { append({ data, data_size }); }
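
    // Appends a single byte; the VERIFY guards against size_t overflow of the
    // new size.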
    ErrorOr<void> try_append(u8 byte)
    {
        auto old_size = size();
        auto new_size = old_size + 1;
        VERIFY(new_size > old_size);
        TRY(try_resize(new_size));
        data()[old_size] = byte;
        return {};
    }

    ErrorOr<void> try_append(ReadonlyBytes bytes)
    {
        return try_append(bytes.data(), bytes.size());
    }

    ErrorOr<void> try_append(void const* data, size_t data_size)
    {
        if (data_size == 0)
            return {};
        VERIFY(data != nullptr);
        auto old_size = size();
        TRY(try_resize(size() + data_size));
        __builtin_memcpy(this->data() + old_size, data, data_size);
        return {};
    }

    void operator+=(ByteBuffer const& other)
    {
        MUST(try_append(other.data(), other.size()));
    }

    void overwrite(size_t offset, void const* data, size_t data_size)
    {
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wstringop-overflow"
        // make sure we're not told to write past the end
        VERIFY(offset + data_size <= size());
        __builtin_memmove(this->data() + offset, data, data_size);
#pragma GCC diagnostic pop
    }

    void zero_fill()
    {
        __builtin_memset(data(), 0, m_size);
    }

    operator Bytes() { return bytes(); }
    operator ReadonlyBytes() const { return bytes(); }

    ALWAYS_INLINE size_t capacity() const { return m_inline ? inline_capacity : m_outline_capacity; }
    ALWAYS_INLINE bool is_inline() const { return m_inline; }

    struct OutlineBuffer {
        Bytes buffer;
        size_t capacity { 0 };
    };
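    // Lets StringBuilder (per the Badge) take ownership of the heap buffer,
    // leaving this ByteBuffer empty and inline.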
    Optional<OutlineBuffer> leak_outline_buffer(Badge<StringBuilder>)
    {
        if (m_inline)
            return {};

        auto buffer = bytes();
        m_inline = true;
        m_size = 0;

        return OutlineBuffer { buffer, capacity() };
    }

private:
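    // Takes over other's storage: steals the heap pointer when outline, copies
    // the bytes when inline, and leaves other empty either way.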
    void move_from(ByteBuffer&& other)
    {
        m_size = other.m_size;
        m_inline = other.m_inline;
        if (!other.m_inline) {
            m_outline_buffer = other.m_outline_buffer;
            m_outline_capacity = other.m_outline_capacity;
        } else {
            VERIFY(other.m_size <= inline_capacity);
            __builtin_memcpy(m_inline_buffer, other.m_inline_buffer, other.m_size);
        }
        other.m_size = 0;
        other.m_inline = true;
    }
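
    // Copies the first `size` bytes of the heap buffer back into the inline
    // buffer, then frees the heap allocation.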
    NEVER_INLINE void shrink_into_inline_buffer(size_t size, bool may_discard_existing_data)
    {
        // m_inline_buffer and m_outline_buffer are part of a union, so save the pointer
        auto* outline_buffer = m_outline_buffer;
        auto outline_capacity = m_outline_capacity;
        if (!may_discard_existing_data)
            __builtin_memcpy(m_inline_buffer, outline_buffer, size);
        kfree_sized(outline_buffer, outline_capacity);
        m_inline = true;
    }

    NEVER_INLINE ErrorOr<void> try_ensure_capacity_slowpath(size_t new_capacity)
    {
        // When we are asked to raise the capacity by very small amounts,
        // the caller is perhaps appending very little data in many calls.
        // To avoid copying the entire ByteBuffer every single time,
        // we raise the capacity exponentially, by a factor of roughly 1.5.
        // This is most noticeable in Lagom, where kmalloc_good_size is just a no-op.
        new_capacity = max(new_capacity, (capacity() * 3) / 2);
        new_capacity = kmalloc_good_size(new_capacity);
        auto* new_buffer = static_cast<u8*>(kmalloc(new_capacity));
        if (!new_buffer)
            return Error::from_errno(ENOMEM);

        if (m_inline) {
            __builtin_memcpy(new_buffer, data(), m_size);
        } else if (m_outline_buffer) {
            __builtin_memcpy(new_buffer, m_outline_buffer, min(new_capacity, m_outline_capacity));
            kfree_sized(m_outline_buffer, m_outline_capacity);
        }

        m_outline_buffer = new_buffer;
        m_outline_capacity = new_capacity;
        m_inline = false;
        return {};
    }
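
    // The inline array and the (pointer, capacity) pair overlap in a union;
    // m_inline records which member is currently active.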
    union {
        u8 m_inline_buffer[inline_capacity];
        struct {
            u8* m_outline_buffer;
            size_t m_outline_capacity;
        };
    };
    size_t m_size { 0 };
    bool m_inline { true };
};

}

using ByteBuffer = AK::Detail::ByteBuffer<32>;
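
// Lets ByteBuffer be used as a hash table key, hashing and comparing by content.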
template<>
struct Traits<ByteBuffer> : public DefaultTraits<ByteBuffer> {
    static unsigned hash(ByteBuffer const& byte_buffer)
    {
        return Traits<ReadonlyBytes>::hash(byte_buffer.span());
    }
    static bool equals(ByteBuffer const& byte_buffer, Bytes const& other)
    {
        return byte_buffer.bytes() == other;
    }
    static bool equals(ByteBuffer const& byte_buffer, ReadonlyBytes const& other)
    {
        return byte_buffer.bytes() == other;
    }
};

}