2020-01-18 08:38:21 +00:00
|
|
|
/*
|
|
|
|
* Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
|
|
|
|
*
|
2021-04-22 08:24:48 +00:00
|
|
|
* SPDX-License-Identifier: BSD-2-Clause
|
2020-01-18 08:38:21 +00:00
|
|
|
*/
|
|
|
|
|
2018-10-10 09:53:07 +00:00
|
|
|
#pragma once
|
|
|
|
|
2019-07-04 05:05:58 +00:00
|
|
|
#include <AK/NonnullRefPtr.h>
|
2019-06-21 16:58:45 +00:00
|
|
|
#include <AK/RefCounted.h>
|
2019-07-04 05:05:58 +00:00
|
|
|
#include <AK/RefPtr.h>
|
2020-07-27 12:15:37 +00:00
|
|
|
#include <AK/Span.h>
|
2019-09-11 17:35:14 +00:00
|
|
|
#include <AK/Types.h>
|
2019-03-16 12:12:13 +00:00
|
|
|
#include <AK/kmalloc.h>
|
2018-10-10 09:53:07 +00:00
|
|
|
|
|
|
|
namespace AK {
|
|
|
|
|
2019-06-21 13:29:31 +00:00
|
|
|
// Heap-allocated, reference-counted backing store for ByteBuffer.
// Owns a kmalloc'd region of m_size bytes; m_data is null for an empty buffer.
class ByteBufferImpl : public RefCounted<ByteBufferImpl> {
public:
    // Creates a buffer of `size` bytes with indeterminate contents.
    static NonnullRefPtr<ByteBufferImpl> create_uninitialized(size_t size);
    // Creates a buffer of `size` bytes, all zeroed.
    static NonnullRefPtr<ByteBufferImpl> create_zeroed(size_t);
    // Creates a buffer containing a copy of the given memory region.
    static NonnullRefPtr<ByteBufferImpl> copy(const void*, size_t);

    ByteBufferImpl() = delete;
    ~ByteBufferImpl() { clear(); }

    // Frees the backing storage and resets to the empty state.
    void clear()
    {
        if (!m_data)
            return;
        kfree(m_data);
        m_data = nullptr;
        // Previously m_size was left stale here, so size() disagreed with the
        // (now freed) data pointer. Reset it to keep the object consistent.
        m_size = 0;
    }

    // Bounds-checked element access.
    u8& operator[](size_t i)
    {
        VERIFY(i < m_size);
        return m_data[i];
    }
    const u8& operator[](size_t i) const
    {
        VERIFY(i < m_size);
        return m_data[i];
    }
    bool is_empty() const { return !m_size; }
    size_t size() const { return m_size; }

    u8* data() { return m_data; }
    const u8* data() const { return m_data; }

    Bytes bytes() { return { data(), size() }; }
    ReadonlyBytes bytes() const { return { data(), size() }; }

    Span<u8> span() { return { data(), size() }; }
    Span<const u8> span() const { return { data(), size() }; }

    // NOTE(review): offset is not bounds-checked here; callers are expected to
    // pass a valid offset.
    u8* offset_pointer(int offset) { return m_data + offset; }
    const u8* offset_pointer(int offset) const { return m_data + offset; }

    // One-past-the-end pointer of the buffer contents.
    void* end_pointer() { return m_data + m_size; }
    const void* end_pointer() const { return m_data + m_size; }

    // NOTE: trim() does not reallocate; it only shrinks the logical size.
    void trim(size_t size)
    {
        VERIFY(size <= m_size);
        m_size = size;
    }

    // Reallocates to a strictly larger size, preserving existing contents.
    void grow(size_t size);

    // Sets every byte of the buffer to zero.
    void zero_fill();

private:
    explicit ByteBufferImpl(size_t);
    ByteBufferImpl(const void*, size_t);

    u8* m_data { nullptr };
    size_t m_size { 0 };
};
|
|
|
|
|
2018-10-10 09:53:07 +00:00
|
|
|
class ByteBuffer {
|
|
|
|
public:
|
2021-01-10 23:29:28 +00:00
|
|
|
ByteBuffer() = default;
|
2018-10-10 09:53:07 +00:00
|
|
|
ByteBuffer(const ByteBuffer& other)
|
2019-07-11 13:45:11 +00:00
|
|
|
: m_impl(other.m_impl)
|
2018-10-10 09:53:07 +00:00
|
|
|
{
|
|
|
|
}
|
|
|
|
ByteBuffer(ByteBuffer&& other)
|
2018-10-16 10:10:01 +00:00
|
|
|
: m_impl(move(other.m_impl))
|
2018-10-10 09:53:07 +00:00
|
|
|
{
|
|
|
|
}
|
|
|
|
ByteBuffer& operator=(ByteBuffer&& other)
|
|
|
|
{
|
|
|
|
if (this != &other)
|
2018-10-16 10:10:01 +00:00
|
|
|
m_impl = move(other.m_impl);
|
2018-10-10 09:53:07 +00:00
|
|
|
return *this;
|
|
|
|
}
|
2018-10-26 22:14:24 +00:00
|
|
|
ByteBuffer& operator=(const ByteBuffer& other)
|
|
|
|
{
|
2019-07-11 13:45:11 +00:00
|
|
|
if (this != &other)
|
|
|
|
m_impl = other.m_impl;
|
2018-10-26 22:14:24 +00:00
|
|
|
return *this;
|
|
|
|
}
|
2018-10-10 09:53:07 +00:00
|
|
|
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] static ByteBuffer create_uninitialized(size_t size) { return ByteBuffer(ByteBufferImpl::create_uninitialized(size)); }
|
|
|
|
[[nodiscard]] static ByteBuffer create_zeroed(size_t size) { return ByteBuffer(ByteBufferImpl::create_zeroed(size)); }
|
|
|
|
[[nodiscard]] static ByteBuffer copy(const void* data, size_t size) { return ByteBuffer(ByteBufferImpl::copy(data, size)); }
|
|
|
|
[[nodiscard]] static ByteBuffer copy(ReadonlyBytes bytes) { return ByteBuffer(ByteBufferImpl::copy(bytes.data(), bytes.size())); }
|
2018-10-10 09:53:07 +00:00
|
|
|
|
|
|
|
~ByteBuffer() { clear(); }
|
|
|
|
void clear() { m_impl = nullptr; }
|
|
|
|
|
2018-12-21 01:10:45 +00:00
|
|
|
operator bool() const { return !is_null(); }
|
|
|
|
bool operator!() const { return is_null(); }
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] bool is_null() const { return m_impl == nullptr; }
|
2018-10-10 09:53:07 +00:00
|
|
|
|
2020-08-22 14:10:19 +00:00
|
|
|
// Disable default implementations that would use surprising integer promotion.
|
|
|
|
bool operator==(const ByteBuffer& other) const;
|
|
|
|
bool operator!=(const ByteBuffer& other) const { return !(*this == other); }
|
|
|
|
bool operator<=(const ByteBuffer& other) const = delete;
|
|
|
|
bool operator>=(const ByteBuffer& other) const = delete;
|
|
|
|
bool operator<(const ByteBuffer& other) const = delete;
|
|
|
|
bool operator>(const ByteBuffer& other) const = delete;
|
|
|
|
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] u8& operator[](size_t i)
|
2019-05-28 09:53:16 +00:00
|
|
|
{
|
2021-02-23 19:42:32 +00:00
|
|
|
VERIFY(m_impl);
|
2019-05-28 09:53:16 +00:00
|
|
|
return (*m_impl)[i];
|
|
|
|
}
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] u8 operator[](size_t i) const
|
2019-05-28 09:53:16 +00:00
|
|
|
{
|
2021-02-23 19:42:32 +00:00
|
|
|
VERIFY(m_impl);
|
2019-05-28 09:53:16 +00:00
|
|
|
return (*m_impl)[i];
|
|
|
|
}
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] bool is_empty() const { return !m_impl || m_impl->is_empty(); }
|
|
|
|
[[nodiscard]] size_t size() const { return m_impl ? m_impl->size() : 0; }
|
2018-10-10 09:53:07 +00:00
|
|
|
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] u8* data() { return m_impl ? m_impl->data() : nullptr; }
|
|
|
|
[[nodiscard]] const u8* data() const { return m_impl ? m_impl->data() : nullptr; }
|
2018-10-10 09:53:07 +00:00
|
|
|
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] Bytes bytes()
|
2021-01-10 23:29:28 +00:00
|
|
|
{
|
|
|
|
if (m_impl) {
|
|
|
|
return m_impl->bytes();
|
|
|
|
}
|
|
|
|
return {};
|
|
|
|
}
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] ReadonlyBytes bytes() const
|
2021-01-10 23:29:28 +00:00
|
|
|
{
|
|
|
|
if (m_impl) {
|
|
|
|
return m_impl->bytes();
|
|
|
|
}
|
|
|
|
return {};
|
|
|
|
}
|
2020-07-27 12:15:37 +00:00
|
|
|
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] Span<u8> span()
|
2021-01-10 23:29:28 +00:00
|
|
|
{
|
|
|
|
if (m_impl) {
|
|
|
|
return m_impl->span();
|
|
|
|
}
|
|
|
|
return {};
|
|
|
|
}
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] Span<const u8> span() const
|
2021-01-10 23:29:28 +00:00
|
|
|
{
|
|
|
|
if (m_impl) {
|
|
|
|
return m_impl->span();
|
|
|
|
}
|
|
|
|
return {};
|
|
|
|
}
|
2020-12-30 23:35:15 +00:00
|
|
|
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] u8* offset_pointer(int offset) { return m_impl ? m_impl->offset_pointer(offset) : nullptr; }
|
|
|
|
[[nodiscard]] const u8* offset_pointer(int offset) const { return m_impl ? m_impl->offset_pointer(offset) : nullptr; }
|
2018-10-10 09:53:07 +00:00
|
|
|
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] void* end_pointer() { return m_impl ? m_impl->end_pointer() : nullptr; }
|
|
|
|
[[nodiscard]] const void* end_pointer() const { return m_impl ? m_impl->end_pointer() : nullptr; }
|
2018-10-10 09:53:07 +00:00
|
|
|
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] ByteBuffer isolated_copy() const
|
2019-04-25 20:05:53 +00:00
|
|
|
{
|
|
|
|
if (!m_impl)
|
2019-05-28 09:53:16 +00:00
|
|
|
return {};
|
2019-09-30 06:57:01 +00:00
|
|
|
return copy(m_impl->data(), m_impl->size());
|
2019-04-25 20:05:53 +00:00
|
|
|
}
|
|
|
|
|
2018-10-10 09:53:07 +00:00
|
|
|
// NOTE: trim() does not reallocate.
|
2020-02-20 11:54:15 +00:00
|
|
|
void trim(size_t size)
|
2018-10-10 09:53:07 +00:00
|
|
|
{
|
|
|
|
if (m_impl)
|
|
|
|
m_impl->trim(size);
|
|
|
|
}
|
|
|
|
|
2021-04-21 19:40:42 +00:00
|
|
|
[[nodiscard]] ByteBuffer slice(size_t offset, size_t size) const
|
2018-10-10 09:53:07 +00:00
|
|
|
{
|
2018-12-21 01:10:45 +00:00
|
|
|
if (is_null())
|
2019-05-28 09:53:16 +00:00
|
|
|
return {};
|
2020-04-27 17:23:39 +00:00
|
|
|
|
2021-03-15 14:46:54 +00:00
|
|
|
if (offset == 0 && size == this->size())
|
|
|
|
return *this;
|
|
|
|
|
2020-04-27 17:23:39 +00:00
|
|
|
// I cannot hand you a slice I don't have
|
2021-02-23 19:42:32 +00:00
|
|
|
VERIFY(offset + size <= this->size());
|
2020-04-27 17:23:39 +00:00
|
|
|
|
2018-12-21 01:10:45 +00:00
|
|
|
return copy(offset_pointer(offset), size);
|
2018-10-10 09:53:07 +00:00
|
|
|
}
|
|
|
|
|
2020-02-20 11:54:15 +00:00
|
|
|
void grow(size_t size)
|
2019-01-18 02:27:51 +00:00
|
|
|
{
|
|
|
|
if (!m_impl)
|
2019-03-16 12:12:13 +00:00
|
|
|
m_impl = ByteBufferImpl::create_uninitialized(size);
|
2019-01-18 02:27:51 +00:00
|
|
|
else
|
|
|
|
m_impl->grow(size);
|
|
|
|
}
|
|
|
|
|
2020-02-20 11:54:15 +00:00
|
|
|
void append(const void* data, size_t data_size)
|
2019-03-18 13:38:30 +00:00
|
|
|
{
|
2020-04-08 15:04:37 +00:00
|
|
|
if (data_size == 0)
|
|
|
|
return;
|
2021-02-23 19:42:32 +00:00
|
|
|
VERIFY(data != nullptr);
|
2019-03-18 13:38:30 +00:00
|
|
|
int old_size = size();
|
|
|
|
grow(size() + data_size);
|
2020-03-08 10:57:24 +00:00
|
|
|
__builtin_memcpy(this->data() + old_size, data, data_size);
|
2019-03-18 13:38:30 +00:00
|
|
|
}
|
|
|
|
|
2020-11-06 08:09:51 +00:00
|
|
|
void operator+=(const ByteBuffer& other)
|
|
|
|
{
|
|
|
|
append(other.data(), other.size());
|
|
|
|
}
|
|
|
|
|
2020-04-03 01:52:31 +00:00
|
|
|
void overwrite(size_t offset, const void* data, size_t data_size)
|
|
|
|
{
|
|
|
|
// make sure we're not told to write past the end
|
2021-02-23 19:42:32 +00:00
|
|
|
VERIFY(offset + data_size <= size());
|
2020-04-03 01:52:31 +00:00
|
|
|
__builtin_memcpy(this->data() + offset, data, data_size);
|
|
|
|
}
|
|
|
|
|
2021-02-21 10:07:14 +00:00
|
|
|
void zero_fill()
|
|
|
|
{
|
|
|
|
m_impl->zero_fill();
|
|
|
|
}
|
|
|
|
|
2020-08-15 16:38:24 +00:00
|
|
|
operator Bytes() { return bytes(); }
|
|
|
|
operator ReadonlyBytes() const { return bytes(); }
|
|
|
|
|
2018-10-10 09:53:07 +00:00
|
|
|
private:
|
2019-06-21 16:37:47 +00:00
|
|
|
explicit ByteBuffer(RefPtr<ByteBufferImpl>&& impl)
|
2018-10-16 10:10:01 +00:00
|
|
|
: m_impl(move(impl))
|
2018-10-10 09:53:07 +00:00
|
|
|
{
|
|
|
|
}
|
|
|
|
|
2019-06-21 16:37:47 +00:00
|
|
|
RefPtr<ByteBufferImpl> m_impl;
|
2018-10-10 09:53:07 +00:00
|
|
|
};
|
|
|
|
|
2020-02-20 11:54:15 +00:00
|
|
|
// Allocates (uninitialized) storage for `size` bytes.
// A zero-size buffer performs no allocation and keeps m_data null.
inline ByteBufferImpl::ByteBufferImpl(size_t size)
    : m_size(size)
{
    if (size == 0)
        return;
    m_data = static_cast<u8*>(kmalloc(size));
}
|
|
|
|
|
2020-12-19 17:23:34 +00:00
|
|
|
// Allocates storage for `size` bytes and copies the given region into it.
// A zero-size buffer performs no allocation and no copy.
inline ByteBufferImpl::ByteBufferImpl(const void* data, size_t size)
    : m_size(size)
{
    if (size == 0)
        return;
    m_data = static_cast<u8*>(kmalloc(size));
    __builtin_memcpy(m_data, data, size);
}
|
|
|
|
|
2020-02-20 11:54:15 +00:00
|
|
|
// Reallocates the buffer to a strictly larger size, copying the old
// contents into the new allocation.
inline void ByteBufferImpl::grow(size_t size)
{
    // Growing never shrinks; trim() handles shrinking without reallocating.
    VERIFY(size > m_size);
    // NOTE: The old `if (size == 0)` shrink-to-empty branch was unreachable
    // dead code: size_t is unsigned, so the VERIFY above guarantees size >= 1.
    u8* new_data = static_cast<u8*>(kmalloc(size));
    // Guard the copy: memcpy from a null pointer is UB even with length 0,
    // and an empty buffer has m_data == nullptr.
    if (m_data)
        __builtin_memcpy(new_data, m_data, m_size);
    u8* old_data = m_data;
    m_data = new_data;
    m_size = size;
    if (old_data)
        kfree(old_data);
}
|
|
|
|
|
2021-02-21 10:07:14 +00:00
|
|
|
inline void ByteBufferImpl::zero_fill()
|
|
|
|
{
|
|
|
|
__builtin_memset(m_data, 0, m_size);
|
|
|
|
}
|
|
|
|
|
2020-02-20 11:54:15 +00:00
|
|
|
// Factory: heap-allocates an impl with `size` uninitialized bytes and hands
// ownership to a NonnullRefPtr.
inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::create_uninitialized(size_t size)
{
    auto* impl = new ByteBufferImpl(size);
    return ::adopt(*impl);
}
|
|
|
|
|
2020-02-20 11:54:15 +00:00
|
|
|
// Factory: like create_uninitialized(), but every byte is set to zero.
inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::create_zeroed(size_t size)
{
    auto buffer = ::adopt(*new ByteBufferImpl(size));
    if (size == 0)
        return buffer;
    __builtin_memset(buffer->data(), 0, size);
    return buffer;
}
|
|
|
|
|
2020-02-20 11:54:15 +00:00
|
|
|
// Factory: heap-allocates an impl holding a copy of the given region and
// hands ownership to a NonnullRefPtr.
inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::copy(const void* data, size_t size)
{
    auto* impl = new ByteBufferImpl(data, size);
    return ::adopt(*impl);
}
|
|
|
|
|
2018-10-10 09:53:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
using AK::ByteBuffer;
|