diff --git a/Sources/OpenRenderBox/include/OpenRenderBoxCxx/Vector/realloc_vector.hpp b/Sources/OpenRenderBox/include/OpenRenderBoxCxx/Vector/realloc_vector.hpp
new file mode 100644
index 0000000..38b05aa
--- /dev/null
+++ b/Sources/OpenRenderBox/include/OpenRenderBoxCxx/Vector/realloc_vector.hpp
@@ -0,0 +1,24 @@
+//
+//  realloc_vector.hpp
+//  OpenRenderBoxCxx
+
+#ifndef realloc_vector_hpp
+#define realloc_vector_hpp
+
+#include
+
+ORB_ASSUME_NONNULL_BEGIN
+
+namespace ORB {
+namespace details {
+template <typename T>
+void *_Nullable realloc_vector(void *ptr, T &size, T new_size);
+
+template <typename T>
+void *_Nullable realloc_vector(void *src, void *dst, T dstSize, T &size, T newSize);
+} /* details */
+} /* ORB */
+
+ORB_ASSUME_NONNULL_END
+
+#endif /* realloc_vector_hpp */
diff --git a/Sources/OpenRenderBox/include/OpenRenderBoxCxx/Vector/vector.hpp b/Sources/OpenRenderBox/include/OpenRenderBoxCxx/Vector/vector.hpp
new file mode 100644
index 0000000..fdf4ae4
--- /dev/null
+++ b/Sources/OpenRenderBox/include/OpenRenderBoxCxx/Vector/vector.hpp
@@ -0,0 +1,240 @@
+//
+//  vector.hpp
+//  OpenRenderBoxCxx
+//
+//  Status: Complete
+//  Modified based on Compute code
+
+#ifndef vector_hpp
+#define vector_hpp
+
+#include
+#include <concepts>
+#include <iterator>
+#include <memory>
+
+ORB_ASSUME_NONNULL_BEGIN
+
+namespace ORB {
+
+template <typename T, size_t _stack_size, typename size_type>
+  requires std::unsigned_integral<size_type>
+class vector {
+ private:
+  T _stack_buffer[_stack_size];
+  T *_Nullable _buffer = nullptr;
+  size_type _size = 0;
+  size_type _capacity = _stack_size;
+
+  void reserve_slow(size_type new_cap);
+
+ public:
+  using value_type = T;
+  using reference = value_type &;
+  using const_reference = const value_type &;
+  using iterator = value_type *_Nonnull;
+  using const_iterator = const value_type *_Nonnull;
+  using reverse_iterator = std::reverse_iterator<iterator>;
+  using const_reverse_iterator = std::reverse_iterator<const_iterator>;
+
+  ~vector();
+
+  // Element access
+
+  reference operator[](size_type pos) { return data()[pos]; };
+  const_reference operator[](size_type pos) const { return data()[pos]; };
+
+  reference front() { return data()[0]; };
+  const_reference front() const { return data()[0]; };
+  reference back() { return data()[_size - 1]; };
+  const_reference back() const { return data()[_size - 1]; };
+
+  T *_Nonnull data() { return _buffer != nullptr ? _buffer : _stack_buffer; };
+  const T *_Nonnull data() const { return _buffer != nullptr ? _buffer : _stack_buffer; };
+
+  // Iterators
+
+  iterator begin() { return iterator(&data()[0]); };
+  const_iterator cbegin() const { return const_iterator(&data()[0]); };
+  iterator end() { return iterator(&data()[_size]); };
+  const_iterator cend() const { return const_iterator(&data()[_size]); };
+
+  reverse_iterator rbegin() { return std::reverse_iterator(end()); };
+  const_reverse_iterator crbegin() const { return std::reverse_iterator(cend()); };
+  reverse_iterator rend() { return std::reverse_iterator(begin()); };
+  const_reverse_iterator crend() const { return std::reverse_iterator(cbegin()); };
+
+  // Capacity
+
+  bool empty() const { return _size == 0; };
+  size_type size() const { return _size; };
+  void reserve(size_type new_cap);
+  size_type capacity() const { return _capacity; };
+  void shrink_to_fit();
+
+  // Modifiers
+
+  void clear();
+
+  void push_back(const T &value);
+  void push_back(T &&value);
+  void pop_back();
+
+  void resize(size_type count);
+  void resize(size_type count, const value_type &value);
+};
+
+static_assert(std::contiguous_iterator<vector<int, 8, size_t>::iterator>);        // instantiation assumed
+static_assert(std::contiguous_iterator<vector<int, 8, size_t>::const_iterator>);  // instantiation assumed
+
+// MARK: Specialization for empty stack buffer
+
+template <typename T, typename size_type>
+  requires std::unsigned_integral<size_type>
+class vector<T, 0, size_type> {
+ private:
+  T *_Nullable _buffer = nullptr;
+  size_type _size = 0;
+  size_type _capacity = 0;
+
+  void reserve_slow(size_type new_cap);
+
+ public:
+  using value_type = T;
+  using reference = value_type &;
+  using const_reference = const value_type &;
+  using iterator = value_type *_Nonnull;
+  using const_iterator = const value_type *_Nonnull;
+  using reverse_iterator = std::reverse_iterator<iterator>;
+  using const_reverse_iterator = std::reverse_iterator<const_iterator>;
+
+  ~vector();
+
+  // Element access
+
+  reference operator[](size_type pos) { return data()[pos]; };
+  const_reference operator[](size_type pos) const { return data()[pos]; };
+
+  reference front() { return data()[0]; };
+  const_reference front() const { return data()[0]; };
+  reference back() { return data()[_size - 1]; };
+  const_reference back() const { return data()[_size - 1]; };
+
+  T *_Nonnull data() { return _buffer; };
+  const T *_Nonnull data() const { return _buffer; };
+
+  // Iterators
+
+  iterator begin() { return iterator(&data()[0]); };
+  iterator end() { return iterator(&data()[_size]); };
+  const_iterator cbegin() const { return const_iterator(&data()[0]); };
+  const_iterator cend() const { return const_iterator(&data()[_size]); };
+  const_iterator begin() const { return cbegin(); };
+  const_iterator end() const { return cend(); };
+
+  reverse_iterator rbegin() { return std::reverse_iterator(end()); };
+  reverse_iterator rend() { return std::reverse_iterator(begin()); };
+  const_reverse_iterator crbegin() const { return std::reverse_iterator(cend()); };
+  const_reverse_iterator crend() const { return std::reverse_iterator(cbegin()); };
+  const_reverse_iterator rbegin() const { return crbegin(); };
+  const_reverse_iterator rend() const { return crend(); };
+
+  // Capacity
+
+  bool empty() const { return _size == 0; };
+  size_type size() const { return _size; };
+  void reserve(size_type new_cap);
+  size_type capacity() const { return _capacity; };
+  void shrink_to_fit();
+
+  // Modifiers
+
+  void clear();
+
+  void push_back(const T &value);
+  void push_back(T &&value);
+  void pop_back();
+
+  void resize(size_type count);
+  void resize(size_type count, const value_type &value);
+};
+
+// MARK: Specialization for unique_ptr
+
+template <typename T, typename size_type>
+  requires std::unsigned_integral<size_type>
+class vector<std::unique_ptr<T>, 0, size_type> {
+ private:
+  std::unique_ptr<T> *_Nullable _buffer = nullptr;
+  size_type _size = 0;
+  size_type _capacity = 0;
+
+  void reserve_slow(size_type new_cap);
+
+ public:
+  using value_type = std::unique_ptr<T>;
+  using reference = value_type &;
+  using const_reference = const value_type &;
+  using iterator = value_type *_Nonnull;
+  using const_iterator = const value_type *_Nonnull;
+  using reverse_iterator = std::reverse_iterator<iterator>;
+  using const_reverse_iterator = std::reverse_iterator<const_iterator>;
+
+  ~vector();
+
+  // Element access
+
+  reference operator[](size_type pos) { return data()[pos]; };
+  const_reference operator[](size_type pos) const { return data()[pos]; };
+
+  reference front() { return data()[0]; };
+  const_reference front() const { return data()[0]; };
+  reference back() { return data()[_size - 1]; };
+  const_reference back() const { return data()[_size - 1]; };
+
+  std::unique_ptr<T> *_Nonnull data() { return _buffer; };
+  const std::unique_ptr<T> *_Nonnull data() const { return _buffer; };
+
+  // Iterators
+
+  iterator begin() { return iterator(&data()[0]); };
+  iterator end() { return iterator(&data()[_size]); };
+  const_iterator cbegin() const { return const_iterator(&data()[0]); };
+  const_iterator cend() const { return const_iterator(&data()[_size]); };
+  const_iterator begin() const { return cbegin(); };
+  const_iterator end() const { return cend(); };
+
+  reverse_iterator rbegin() { return std::reverse_iterator(end()); };
+  reverse_iterator rend() { return std::reverse_iterator(begin()); };
+  const_reverse_iterator crbegin() const { return std::reverse_iterator(cend()); };
+  const_reverse_iterator crend() const { return std::reverse_iterator(cbegin()); };
+  const_reverse_iterator rbegin() const { return crbegin(); };
+  const_reverse_iterator rend() const { return crend(); };
+
+  // Capacity
+
+  bool empty() const { return _size == 0; };
+  size_type size() const { return _size; };
+  void reserve(size_type new_cap);
+  size_type capacity() const { return _capacity; };
+  void shrink_to_fit();
+
+  // Modifiers
+
+  void clear();
+
+  void push_back(const std::unique_ptr<T> &value) = delete;
+  ORB_INLINE void push_back(std::unique_ptr<T> &&value);
+  ORB_INLINE void pop_back();
+
+  void resize(size_type count);
+  void resize(size_type count, const value_type &value);
+};
+
+} /* ORB */
+
+ORB_ASSUME_NONNULL_END
+
+#include "vector.tpp"
+
+#endif /* vector_hpp */
diff --git a/Sources/OpenRenderBox/include/OpenRenderBoxCxx/Vector/vector.tpp b/Sources/OpenRenderBox/include/OpenRenderBoxCxx/Vector/vector.tpp
new file mode 100644
index 0000000..1a878c2
--- /dev/null
+++ b/Sources/OpenRenderBox/include/OpenRenderBoxCxx/Vector/vector.tpp
@@ -0,0 +1,395 @@
+//
+//  vector.tpp
+//  OpenRenderBoxCxx
+//
+//  Status: Complete
+//  Modified based on Compute code
+
+#include <algorithm>
+#include <cassert>
+
+#include
+#if ORB_TARGET_OS_DARWIN
+#include <malloc/malloc.h>
+#else
+#include <cstdlib>
+#endif /* ORB_TARGET_OS_DARWIN */
+#include <cstring>
+#include <cstdlib>
+#include <new>
+
+namespace ORB {
+
+#pragma mark - Base implementation
+
+namespace details {
+
+template <size_t element_size_bytes, typename size_type>
+  requires std::unsigned_integral<size_type>
+void *realloc_vector(void *buffer, void *stack_buffer, size_type stack_size, size_type *size,
+                     size_type preferred_new_size) {
+  // copy data from the heap buffer into the stack buffer if possible
+  if (preferred_new_size <= stack_size) {
+    if (buffer) {
+      memcpy(stack_buffer, buffer, preferred_new_size * element_size_bytes);
+      free(buffer);
+      *size = stack_size;
+    }
+    return nullptr;
+  }
+
+  #if ORB_TARGET_OS_DARWIN
+  size_t new_size_bytes = malloc_good_size(preferred_new_size * element_size_bytes);
+  #else
+  size_t new_size_bytes = preferred_new_size * element_size_bytes;
+  #endif
+  size_type new_size = new_size_bytes / element_size_bytes;
+  if (new_size == *size) {
+    // nothing to do
+    return buffer;
+  }
+
+  void *new_buffer = realloc(buffer, new_size_bytes);
+  if (!new_buffer) {
+    precondition_failure("allocation failure");
+  }
+
+  // copy data from stack buffer into heap buffer
+  if (!buffer) {
+    memcpy(new_buffer, stack_buffer, (*size) * element_size_bytes);
+  }
+
+  *size = new_size;
+  return new_buffer;
+}
+
+} /* namespace details */
+
+template <typename T, size_t _stack_size, typename size_type>
+  requires std::unsigned_integral<size_type>
+vector<T, _stack_size, size_type>::~vector() {
+  for (size_type i = 0; i < _size; i++) {
+    data()[i].~T();
+  }
+  if (_buffer) {
+    free(_buffer);
+  }
+}
+
+template <typename T, size_t _stack_size, typename size_type>
+  requires std::unsigned_integral<size_type>
+void vector<T, _stack_size, size_type>::reserve_slow(size_type new_cap) {
+  size_type effective_new_cap = std::max(capacity() * 1.5, new_cap * 1.0);
+  _buffer = reinterpret_cast<T *>(details::realloc_vector<sizeof(T), size_type>(
+      _buffer, _stack_buffer, _stack_size, &_capacity, effective_new_cap));
+}
+
+template <typename T, size_t _stack_size, typename size_type>
+  requires std::unsigned_integral<size_type>
+void vector<T, _stack_size, size_type>::reserve(size_type new_cap) {
+  if (new_cap <= capacity()) {
+    return;
+  }
+  reserve_slow(new_cap);
+}
+
+template <typename T, size_t _stack_size, typename size_type>
+  requires std::unsigned_integral<size_type>
+void vector<T, _stack_size, size_type>::shrink_to_fit() {
+  if (capacity() > _size) {
+    _buffer = reinterpret_cast<T *>(details::realloc_vector<sizeof(T), size_type>(
+        _buffer, _stack_buffer, _stack_size, &_capacity, _size));
+  }
+}
+
+template <typename T, size_t _stack_size, typename size_type>
+  requires std::unsigned_integral<size_type>
+void vector<T, _stack_size, size_type>::clear() {
+  for (size_type i = 0; i < _size; i++) {
+    data()[i].~T();
+  }
+  _size = 0;
+}
+
+template <typename T, size_t _stack_size, typename size_type>
+  requires std::unsigned_integral<size_type>
+void vector<T, _stack_size, size_type>::push_back(const T &value) {
+  reserve(_size + 1);
+  new (&data()[_size]) value_type(value);
+  _size += 1;
+}
+
+template <typename T, size_t _stack_size, typename size_type>
+  requires std::unsigned_integral<size_type>
+void vector<T, _stack_size, size_type>::push_back(T &&value) {
+  reserve(_size + 1);
+  new (&data()[_size]) value_type(std::move(value));
+  _size += 1;
+}
+
+template <typename T, size_t _stack_size, typename size_type>
+  requires std::unsigned_integral<size_type>
+void vector<T, _stack_size, size_type>::pop_back() {
+  assert(size() > 0);
+  data()[_size - 1].~T();
+  _size -= 1;
+}
+
+template <typename T, size_t _stack_size, typename size_type>
+  requires std::unsigned_integral<size_type>
+void vector<T, _stack_size, size_type>::resize(size_type count) {
+  reserve(count);
+  if (count < _size) {
+    for (auto i = count; i < _size; i++) {
+      data()[i].~T();
+    }
+  } else if (count > _size) {
+    for (auto i = _size; i < count; i++) {
+      new (&data()[i]) value_type();
+    }
+  }
+  _size = count;
+}
+
+template <typename T, size_t _stack_size, typename size_type>
+  requires std::unsigned_integral<size_type>
+void vector<T, _stack_size, size_type>::resize(size_type count, const value_type &value) {
+  reserve(count);
+  if (count < _size) {
+    for (auto i = count; i < _size; i++) {
+      data()[i].~T();
+    }
+  } else if (count > _size) {
+    for (auto i = _size; i < count; i++) {
+      new (&data()[i]) value_type(value);
+    }
+  }
+  _size = count;
+}
+
+#pragma mark - Specialization for empty stack buffer
+
+namespace details {
+
+template <size_t element_size, typename size_type>
+  requires std::unsigned_integral<size_type>
+void *realloc_vector(void *buffer, size_type *size, size_type preferred_new_size) {
+  if (preferred_new_size == 0) {
+    *size = 0;
+    free(buffer);
+    return nullptr;
+  }
+
+  #if ORB_TARGET_OS_DARWIN
+  size_t new_size_bytes = malloc_good_size(preferred_new_size * element_size);
+  #else
+  size_t new_size_bytes = preferred_new_size * element_size;
+  #endif
+  size_type new_size = (size_type)(new_size_bytes / element_size);
+  if (new_size == *size) {
+    // nothing to do
+    return buffer;
+  }
+
+  void *new_buffer = realloc(buffer, new_size_bytes);
+  if (!new_buffer) {
+    precondition_failure("allocation failure");
+  }
+  *size = new_size;
new_buffer; +} + +} /* namespace details */ + +template + requires std::unsigned_integral +vector::~vector() { + for (auto i = 0; i < _size; i++) { + _buffer[i].~T(); + } + if (_buffer) { + free(_buffer); + } +} + +template + requires std::unsigned_integral +void vector::reserve_slow(size_type new_cap) { + size_type effective_new_cap = std::max(capacity() * 1.5, new_cap * 1.0); + _buffer = + reinterpret_cast(details::realloc_vector(_buffer, &_capacity, effective_new_cap)); +} + +template + requires std::unsigned_integral +void vector::reserve(size_type new_cap) { + if (new_cap <= capacity()) { + return; + } + reserve_slow(new_cap); +} + +template +requires std::unsigned_integral +void vector::shrink_to_fit() { + if (capacity() > size()) { + _buffer = + reinterpret_cast(details::realloc_vector(_buffer, &_capacity, 0)); + } +} + +template + requires std::unsigned_integral +void vector::clear() { + for (auto i = 0; i < _size; i++) { + data()[i].~T(); + } + _size = 0; +} + +template + requires std::unsigned_integral +void vector::push_back(const T &value) { + reserve(_size + 1); + new (&_buffer[_size]) value_type(value); + _size += 1; +} + +template + requires std::unsigned_integral +void vector::push_back(T &&value) { + reserve(_size + 1); + new (&_buffer[_size]) value_type(std::move(value)); + _size += 1; +} + +template + requires std::unsigned_integral +void vector::pop_back() { + assert(size() > 0); + data()[_size - 1].~T(); + _size -= 1; +} + +template + requires std::unsigned_integral +void vector::resize(size_type count) { + reserve(count); + if (count < _size) { + for (auto i = count; i < _size; i++) { + data()[i].~T(); + } + } else if (count > _size) { + for (auto i = _size; i < count; i++) { + new (this[i]) value_type(); + } + } + _size = count; +} + +template + requires std::unsigned_integral +void vector::resize(size_type count, const value_type &value) { + reserve(count); + if (count < _size) { + for (auto i = count; i < _size; i++) { + data()[i].~T(); + } + } else if (count > _size) { + for (auto i = _size; i < count; i++) { + new (this[i]) value_type(value); + } + } + _size = count; +} + +#pragma mark - Specialization for unique_ptr + +template + requires std::unsigned_integral +vector, 0, size_type>::~vector() { + clear(); + if (_buffer) { + free(_buffer); + _buffer = nullptr; + } +} + +template + requires std::unsigned_integral +void vector, 0, size_type>::clear() { + for (size_type i = 0; i < _size; ++i) { + _buffer[i].reset(); + _buffer[i].~unique_ptr(); + } + _size = 0; +} + +template + requires std::unsigned_integral +void vector, 0, size_type>::reserve_slow(size_type new_cap) { + if (new_cap <= _capacity) { + return; + } + + size_type actual_new_cap = std::max(new_cap, _capacity * 2); + std::unique_ptr *new_buffer = (std::unique_ptr *)malloc(actual_new_cap * sizeof(std::unique_ptr)); + if (!new_buffer) { + return; + } + + for (size_type i = 0; i < _size; ++i) { + new (&new_buffer[i]) std::unique_ptr(std::move(_buffer[i])); + _buffer[i].~unique_ptr(); + } + + if (_buffer) { + free(_buffer); + } + + _buffer = new_buffer; + _capacity = actual_new_cap; +} + +template + requires std::unsigned_integral +void vector, 0, size_type>::reserve(size_type new_cap) { + if (new_cap > _capacity) { + reserve_slow(new_cap); + } +} + +template + requires std::unsigned_integral +ORB_INLINE void vector, 0, size_type>::push_back(std::unique_ptr &&value) { + reserve(_size + 1); + new (&_buffer[_size]) std::unique_ptr(std::move(value)); + ++_size; +} + +template + requires std::unsigned_integral 
+ORB_INLINE void vector<std::unique_ptr<T>, 0, size_type>::pop_back() {
+  assert(_size > 0);
+  _buffer[_size - 1].reset();
+  _buffer[_size - 1].~unique_ptr();
+  --_size;
+}
+
+template <typename T, typename size_type>
+  requires std::unsigned_integral<size_type>
+void vector<std::unique_ptr<T>, 0, size_type>::resize(size_type count) {
+  reserve(count);
+  if (count < _size) {
+    for (auto i = count; i < _size; i++) {
+      _buffer[i].reset();
+      _buffer[i].~unique_ptr();
+    }
+  } else if (count > _size) {
+    for (auto i = _size; i < count; i++) {
+      new (&_buffer[i]) std::unique_ptr<T>();
+    }
+  }
+  _size = count;
+}
+
+} /* ORB */
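
Usage note (not part of the diff): a minimal sketch of how the new container is expected to be used, assuming the template parameter order <element type, inline capacity, unsigned size type> and the include path reconstructed above.

    #include <OpenRenderBoxCxx/Vector/vector.hpp>  // assumed include path, mirroring the source layout added here
    #include <cstdint>
    #include <memory>

    void vector_usage_sketch() {
        // Small-buffer vector: the first 8 elements live in the inline stack buffer;
        // growing past that reallocates the contents onto the heap.
        ORB::vector<int, 8, uint32_t> values;
        for (int i = 0; i < 32; ++i) {
            values.push_back(i);
        }
        int sum = 0;
        for (int v : values) {  // begin()/end() are raw pointers, satisfying std::contiguous_iterator
            sum += v;
        }

        // unique_ptr specialization: the copying push_back is deleted, so the
        // container is move-only and releases ownership in pop_back()/clear().
        ORB::vector<std::unique_ptr<int>, 0, uint32_t> owners;
        owners.push_back(std::make_unique<int>(sum));
        owners.pop_back();
    }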