2018-10-10 09:53:07 +00:00
|
|
|
#pragma once
|
|
|
|
|
2018-12-03 23:27:16 +00:00
|
|
|
#include "StdLibExtras.h"
|
2019-05-28 09:53:16 +00:00
|
|
|
#include "Types.h"
|
2019-06-21 16:45:35 +00:00
|
|
|
#include <AK/RefPtr.h>
|
2019-06-21 16:58:45 +00:00
|
|
|
#include <AK/RefCounted.h>
|
2019-03-16 12:12:13 +00:00
|
|
|
#include <AK/kmalloc.h>
|
2018-10-10 09:53:07 +00:00
|
|
|
|
|
|
|
namespace AK {
|
|
|
|
|
2019-06-21 13:29:31 +00:00
|
|
|
// Heap-backed byte storage shared between ByteBuffer handles via reference
// counting. The impl either owns its bytes (Uninitialized/Copy/Adopt modes,
// freed with kfree()) or merely wraps foreign memory (Wrap mode, left alone).
class ByteBufferImpl : public RefCounted<ByteBufferImpl> {
public:
    static NonnullRefPtr<ByteBufferImpl> create_uninitialized(int size);
    static NonnullRefPtr<ByteBufferImpl> create_zeroed(int);
    static NonnullRefPtr<ByteBufferImpl> copy(const void*, int);
    static NonnullRefPtr<ByteBufferImpl> wrap(void*, int);
    static NonnullRefPtr<ByteBufferImpl> wrap(const void*, int);
    static NonnullRefPtr<ByteBufferImpl> adopt(void*, int);

    ~ByteBufferImpl() { clear(); }

    // Releases owned storage and resets to the empty state.
    void clear()
    {
        if (!m_data)
            return;
        if (m_owned)
            kfree(m_data);
        m_data = nullptr;
        // Also reset the size: previously a cleared impl kept its stale
        // m_size, so operator[] could pass its ASSERT and deref nullptr.
        m_size = 0;
    }

    byte& operator[](int i)
    {
        // Index is signed; reject negative values as well as overruns
        // (a bare `i < m_size` check lets negative indices through).
        ASSERT(i >= 0 && i < m_size);
        return m_data[i];
    }
    const byte& operator[](int i) const
    {
        ASSERT(i >= 0 && i < m_size);
        return m_data[i];
    }
    bool is_empty() const { return !m_size; }
    int size() const { return m_size; }

    byte* pointer() { return m_data; }
    const byte* pointer() const { return m_data; }

    // NOTE: no bounds checking; callers are expected to stay within [0, m_size].
    byte* offset_pointer(int offset) { return m_data + offset; }
    const byte* offset_pointer(int offset) const { return m_data + offset; }

    // One-past-the-end of the buffer contents.
    void* end_pointer() { return m_data + m_size; }
    const void* end_pointer() const { return m_data + m_size; }

    // NOTE: trim() does not reallocate.
    void trim(int size)
    {
        // `size` is signed; a negative value must not become the new m_size.
        ASSERT(size >= 0 && size <= m_size);
        m_size = size;
    }

    void grow(int size);

private:
    enum ConstructionMode {
        Uninitialized,
        Copy,
        Wrap,
        Adopt
    };
    explicit ByteBufferImpl(int);                       // For ConstructionMode=Uninitialized
    ByteBufferImpl(const void*, int, ConstructionMode); // For ConstructionMode=Copy
    ByteBufferImpl(void*, int, ConstructionMode);       // For ConstructionMode=Wrap/Adopt
    ByteBufferImpl() {}

    byte* m_data { nullptr };
    int m_size { 0 };
    bool m_owned { false };
};
|
|
|
|
|
2018-10-10 09:53:07 +00:00
|
|
|
class ByteBuffer {
|
|
|
|
public:
|
2019-05-28 09:53:16 +00:00
|
|
|
ByteBuffer() {}
|
|
|
|
ByteBuffer(std::nullptr_t) {}
|
2018-10-10 09:53:07 +00:00
|
|
|
ByteBuffer(const ByteBuffer& other)
|
2019-01-31 16:31:23 +00:00
|
|
|
: m_impl(other.m_impl.copy_ref())
|
2018-10-10 09:53:07 +00:00
|
|
|
{
|
|
|
|
}
|
|
|
|
ByteBuffer(ByteBuffer&& other)
|
2018-10-16 10:10:01 +00:00
|
|
|
: m_impl(move(other.m_impl))
|
2018-10-10 09:53:07 +00:00
|
|
|
{
|
|
|
|
}
|
|
|
|
ByteBuffer& operator=(ByteBuffer&& other)
|
|
|
|
{
|
|
|
|
if (this != &other)
|
2018-10-16 10:10:01 +00:00
|
|
|
m_impl = move(other.m_impl);
|
2018-10-10 09:53:07 +00:00
|
|
|
return *this;
|
|
|
|
}
|
2018-10-26 22:14:24 +00:00
|
|
|
ByteBuffer& operator=(const ByteBuffer& other)
|
|
|
|
{
|
2019-01-31 16:31:23 +00:00
|
|
|
m_impl = other.m_impl.copy_ref();
|
2018-10-26 22:14:24 +00:00
|
|
|
return *this;
|
|
|
|
}
|
2018-10-10 09:53:07 +00:00
|
|
|
|
2019-06-14 04:43:56 +00:00
|
|
|
static ByteBuffer create_uninitialized(int size) { return ByteBuffer(ByteBufferImpl::create_uninitialized(size)); }
|
|
|
|
static ByteBuffer create_zeroed(int size) { return ByteBuffer(ByteBufferImpl::create_zeroed(size)); }
|
|
|
|
static ByteBuffer copy(const void* data, int size) { return ByteBuffer(ByteBufferImpl::copy(data, size)); }
|
|
|
|
static ByteBuffer wrap(const void* data, int size) { return ByteBuffer(ByteBufferImpl::wrap(data, size)); }
|
|
|
|
static ByteBuffer wrap(void* data, int size) { return ByteBuffer(ByteBufferImpl::wrap(data, size)); }
|
|
|
|
static ByteBuffer adopt(void* data, int size) { return ByteBuffer(ByteBufferImpl::adopt(data, size)); }
|
2018-10-10 09:53:07 +00:00
|
|
|
|
|
|
|
~ByteBuffer() { clear(); }
|
|
|
|
void clear() { m_impl = nullptr; }
|
|
|
|
|
2018-12-21 01:10:45 +00:00
|
|
|
operator bool() const { return !is_null(); }
|
|
|
|
bool operator!() const { return is_null(); }
|
|
|
|
bool is_null() const { return m_impl == nullptr; }
|
2018-10-10 09:53:07 +00:00
|
|
|
|
2019-06-14 04:43:56 +00:00
|
|
|
byte& operator[](int i)
|
2019-05-28 09:53:16 +00:00
|
|
|
{
|
|
|
|
ASSERT(m_impl);
|
|
|
|
return (*m_impl)[i];
|
|
|
|
}
|
2019-06-14 04:43:56 +00:00
|
|
|
byte operator[](int i) const
|
2019-05-28 09:53:16 +00:00
|
|
|
{
|
|
|
|
ASSERT(m_impl);
|
|
|
|
return (*m_impl)[i];
|
|
|
|
}
|
2018-12-21 01:10:45 +00:00
|
|
|
bool is_empty() const { return !m_impl || m_impl->is_empty(); }
|
2019-06-14 04:43:56 +00:00
|
|
|
int size() const { return m_impl ? m_impl->size() : 0; }
|
2018-10-10 09:53:07 +00:00
|
|
|
|
2019-04-20 12:13:40 +00:00
|
|
|
byte* data() { return pointer(); }
|
|
|
|
const byte* data() const { return pointer(); }
|
|
|
|
|
2018-10-10 09:53:07 +00:00
|
|
|
byte* pointer() { return m_impl ? m_impl->pointer() : nullptr; }
|
|
|
|
const byte* pointer() const { return m_impl ? m_impl->pointer() : nullptr; }
|
|
|
|
|
2019-06-14 04:43:56 +00:00
|
|
|
byte* offset_pointer(int offset) { return m_impl ? m_impl->offset_pointer(offset) : nullptr; }
|
|
|
|
const byte* offset_pointer(int offset) const { return m_impl ? m_impl->offset_pointer(offset) : nullptr; }
|
2018-10-10 09:53:07 +00:00
|
|
|
|
2019-03-18 13:38:30 +00:00
|
|
|
void* end_pointer() { return m_impl ? m_impl->end_pointer() : nullptr; }
|
2018-12-21 01:10:45 +00:00
|
|
|
const void* end_pointer() const { return m_impl ? m_impl->end_pointer() : nullptr; }
|
2018-10-10 09:53:07 +00:00
|
|
|
|
2019-04-25 20:05:53 +00:00
|
|
|
ByteBuffer isolated_copy() const
|
|
|
|
{
|
|
|
|
if (!m_impl)
|
2019-05-28 09:53:16 +00:00
|
|
|
return {};
|
2019-04-25 20:05:53 +00:00
|
|
|
return copy(m_impl->pointer(), m_impl->size());
|
|
|
|
}
|
|
|
|
|
2018-10-10 09:53:07 +00:00
|
|
|
// NOTE: trim() does not reallocate.
|
2019-06-14 04:43:56 +00:00
|
|
|
void trim(int size)
|
2018-10-10 09:53:07 +00:00
|
|
|
{
|
|
|
|
if (m_impl)
|
|
|
|
m_impl->trim(size);
|
|
|
|
}
|
|
|
|
|
2019-06-14 04:43:56 +00:00
|
|
|
ByteBuffer slice(int offset, int size) const
|
2018-10-10 09:53:07 +00:00
|
|
|
{
|
2018-12-21 01:10:45 +00:00
|
|
|
if (is_null())
|
2019-05-28 09:53:16 +00:00
|
|
|
return {};
|
2018-10-10 09:53:07 +00:00
|
|
|
if (offset >= this->size())
|
2019-05-28 09:53:16 +00:00
|
|
|
return {};
|
2018-10-10 09:53:07 +00:00
|
|
|
if (offset + size >= this->size())
|
|
|
|
size = this->size() - offset;
|
2018-12-21 01:10:45 +00:00
|
|
|
return copy(offset_pointer(offset), size);
|
2018-10-10 09:53:07 +00:00
|
|
|
}
|
|
|
|
|
2019-06-14 04:43:56 +00:00
|
|
|
void grow(int size)
|
2019-01-18 02:27:51 +00:00
|
|
|
{
|
|
|
|
if (!m_impl)
|
2019-03-16 12:12:13 +00:00
|
|
|
m_impl = ByteBufferImpl::create_uninitialized(size);
|
2019-01-18 02:27:51 +00:00
|
|
|
else
|
|
|
|
m_impl->grow(size);
|
|
|
|
}
|
|
|
|
|
2019-03-18 13:38:30 +00:00
|
|
|
void append(const void* data, int data_size)
|
|
|
|
{
|
|
|
|
int old_size = size();
|
|
|
|
grow(size() + data_size);
|
|
|
|
memcpy(pointer() + old_size, data, data_size);
|
|
|
|
}
|
|
|
|
|
2018-10-10 09:53:07 +00:00
|
|
|
private:
|
2019-06-21 16:37:47 +00:00
|
|
|
explicit ByteBuffer(RefPtr<ByteBufferImpl>&& impl)
|
2018-10-16 10:10:01 +00:00
|
|
|
: m_impl(move(impl))
|
2018-10-10 09:53:07 +00:00
|
|
|
{
|
|
|
|
}
|
|
|
|
|
2019-06-21 16:37:47 +00:00
|
|
|
RefPtr<ByteBufferImpl> m_impl;
|
2018-10-10 09:53:07 +00:00
|
|
|
};
|
|
|
|
|
2019-03-16 12:12:13 +00:00
|
|
|
// Uninitialized-mode constructor: allocates `size` bytes and takes ownership.
// The contents are left uninitialized on purpose.
inline ByteBufferImpl::ByteBufferImpl(int size)
    : m_data(static_cast<byte*>(kmalloc(size)))
    , m_size(size)
    , m_owned(true)
{
}
|
|
|
|
|
|
|
|
// Copy-mode constructor: allocates fresh owned storage and duplicates the
// caller's bytes into it. Only the Copy mode is valid for this overload.
inline ByteBufferImpl::ByteBufferImpl(const void* data, int size, ConstructionMode mode)
    : m_size(size)
{
    ASSERT(mode == Copy);
    auto* storage = static_cast<byte*>(kmalloc(size));
    memcpy(storage, data, size);
    m_data = storage;
    m_owned = true;
}
|
|
|
|
|
2019-06-14 04:43:56 +00:00
|
|
|
// Wrap/Adopt-mode constructor: points at the caller's memory without copying.
// Adopt takes ownership of the allocation; Wrap merely borrows it.
inline ByteBufferImpl::ByteBufferImpl(void* data, int size, ConstructionMode mode)
    : m_data(static_cast<byte*>(data))
    , m_size(size)
{
    // m_owned defaults to false, so this single assignment covers both modes.
    m_owned = (mode == Adopt);
}
|
|
|
|
|
2019-06-14 04:43:56 +00:00
|
|
|
inline void ByteBufferImpl::grow(int size)
|
2019-03-16 12:12:13 +00:00
|
|
|
{
|
|
|
|
ASSERT(size > m_size);
|
|
|
|
ASSERT(m_owned);
|
|
|
|
byte* new_data = static_cast<byte*>(kmalloc(size));
|
|
|
|
memcpy(new_data, m_data, m_size);
|
|
|
|
byte* old_data = m_data;
|
|
|
|
m_data = new_data;
|
|
|
|
m_size = size;
|
|
|
|
kfree(old_data);
|
|
|
|
}
|
|
|
|
|
2019-06-21 16:37:47 +00:00
|
|
|
// Creates an owned buffer of `size` bytes with uninitialized contents.
inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::create_uninitialized(int size)
{
    auto& impl = *new ByteBufferImpl(size);
    return ::adopt(impl);
}
|
|
|
|
|
2019-06-21 16:37:47 +00:00
|
|
|
// Creates an owned buffer of `size` bytes, filled with zeroes.
inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::create_zeroed(int size)
{
    // Reuse the uninitialized factory, then zero the storage.
    auto buffer = create_uninitialized(size);
    memset(buffer->pointer(), 0, size);
    return buffer;
}
|
|
|
|
|
2019-06-21 16:37:47 +00:00
|
|
|
// Creates an owned buffer holding a duplicate of the caller's bytes.
inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::copy(const void* data, int size)
{
    auto& impl = *new ByteBufferImpl(data, size, Copy);
    return ::adopt(impl);
}
|
|
|
|
|
2019-06-21 16:37:47 +00:00
|
|
|
// Creates a non-owning view over the caller's memory; nothing is freed on
// destruction.
inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::wrap(void* data, int size)
{
    auto& impl = *new ByteBufferImpl(data, size, Wrap);
    return ::adopt(impl);
}
|
|
|
|
|
2019-06-21 16:37:47 +00:00
|
|
|
// Const overload of wrap(). Wrap mode never writes through the pointer, so
// shedding const with const_cast here does not mutate the caller's data.
inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::wrap(const void* data, int size)
{
    void* mutable_data = const_cast<void*>(data);
    return ::adopt(*new ByteBufferImpl(mutable_data, size, Wrap));
}
|
|
|
|
|
2019-06-21 16:37:47 +00:00
|
|
|
// Takes ownership of an existing allocation; it will be kfree()'d when the
// last reference drops.
inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::adopt(void* data, int size)
{
    auto& impl = *new ByteBufferImpl(data, size, Adopt);
    return ::adopt(impl);
}
|
|
|
|
|
2018-10-10 09:53:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
using AK::ByteBuffer;
|