From 698c2e01f0bdad75d995918b3d4d4caa5b07a4af Mon Sep 17 00:00:00 2001
From: Relintai
Date: Sun, 15 Jan 2023 17:30:08 +0100
Subject: [PATCH] Backported the tight version of Godot 4's LocalVector as
 the new TightLocalVector class.

---
 core/containers/local_vector.h       |   2 +
 core/containers/tight_local_vector.h | 318 +++++++++++++++++++++++++++
 2 files changed, 320 insertions(+)
 create mode 100644 core/containers/tight_local_vector.h

diff --git a/core/containers/local_vector.h b/core/containers/local_vector.h
index 06d837629..8ca2b605b 100644
--- a/core/containers/local_vector.h
+++ b/core/containers/local_vector.h
@@ -133,6 +133,7 @@ public:
 		}
 	}
 	_FORCE_INLINE_ bool empty() const { return count == 0; }
+	_FORCE_INLINE_ U get_capacity() const { return capacity; }
 	_FORCE_INLINE_ void reserve(U p_size) {
 		p_size = nearest_power_of_2_templated(p_size);
 		if (p_size > capacity) {
@@ -279,6 +280,7 @@ public:
 			data[i] = r[i];
 		}
 	}
+
 	inline LocalVector &operator=(const LocalVector &p_from) {
 		resize(p_from.size());
 		for (U i = 0; i < p_from.count; i++) {
diff --git a/core/containers/tight_local_vector.h b/core/containers/tight_local_vector.h
new file mode 100644
index 000000000..ecc205e4c
--- /dev/null
+++ b/core/containers/tight_local_vector.h
@@ -0,0 +1,318 @@
+/**************************************************************************/
+/*  tight_local_vector.h                                                  */
+/**************************************************************************/
+/*                         This file is part of:                          */
+/*                             GODOT ENGINE                               */
+/*                        https://godotengine.org                         */
+/**************************************************************************/
+/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
+/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur.                  */
+/*                                                                        */
+/* Permission is hereby granted, free of charge, to any person obtaining  */
+/* a copy of this software and associated documentation files (the        */
+/* "Software"), to deal in the Software without restriction, including    */
+/* without limitation the rights to use, copy, modify, merge, publish,    */
+/* distribute, sublicense, and/or sell copies of the Software, and to     */
+/* permit persons to whom the Software is furnished to do so, subject to  */
+/* the following conditions:                                              */
+/*                                                                        */
+/* The above copyright notice and this permission notice shall be         */
+/* included in all copies or substantial portions of the Software.        */
+/*                                                                        */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,        */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF     */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY   */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,   */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE      */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                 */
+/**************************************************************************/
+
+#ifndef TIGHT_LOCAL_VECTOR_H
+#define TIGHT_LOCAL_VECTOR_H
+
+#include "core/containers/pool_vector.h"
+#include "core/containers/sort_array.h"
+#include "core/containers/vector.h"
+#include "core/error/error_macros.h"
+#include "core/os/memory.h"
+
+// It grows strictly as much as needed. (The vanilla LocalVector is what you want in most cases.)
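+// In this backport, "tight" refers to reserve(): it allocates exactly the
+// requested capacity instead of rounding it up to the next power of two the
+// way LocalVector::reserve() does. push_back() and resize() still double the
+// capacity whenever more room is needed.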
+template <class T, class U = uint32_t, bool force_trivial = false>
+class TightLocalVector {
+private:
+	U count = 0;
+	U capacity = 0;
+	T *data = nullptr;
+
+public:
+	T *ptr() {
+		return data;
+	}
+
+	const T *ptr() const {
+		return data;
+	}
+
+	_FORCE_INLINE_ void push_back(T p_elem) {
+		if (unlikely(count == capacity)) {
+			if (capacity == 0) {
+				capacity = 1;
+			} else {
+				capacity <<= 1;
+			}
+			data = (T *)memrealloc(data, capacity * sizeof(T));
+			CRASH_COND_MSG(!data, "Out of memory");
+		}
+
+		if constexpr (!__has_trivial_constructor(T) && !force_trivial) {
+			memnew_placement(&data[count++], T(p_elem));
+		} else {
+			data[count++] = p_elem;
+		}
+	}
+
+	void remove(U p_index) {
+		ERR_FAIL_UNSIGNED_INDEX(p_index, count);
+		count--;
+		for (U i = p_index; i < count; i++) {
+			data[i] = data[i + 1];
+		}
+		if constexpr (!__has_trivial_destructor(T) && !force_trivial) {
+			data[count].~T();
+		}
+	}
+
+	/// Removes the item, copying the last value into the position of the one
+	/// to remove. It's generally faster than `remove`.
+	void remove_unordered(U p_index) {
+		ERR_FAIL_INDEX(p_index, count);
+		count--;
+		if (count > p_index) {
+			data[p_index] = data[count];
+		}
+		if constexpr (!__has_trivial_destructor(T) && !force_trivial) {
+			data[count].~T();
+		}
+	}
+
+	void erase(const T &p_val) {
+		int64_t idx = find(p_val);
+		if (idx >= 0) {
+			remove(idx);
+		}
+	}
+
+	U erase_multiple_unordered(const T &p_val) {
+		U from = 0;
+		U occurrences = 0;
+		while (true) {
+			int64_t idx = find(p_val, from);
+
+			if (idx == -1) {
+				break;
+			}
+			remove_unordered(idx);
+			from = idx;
+			occurrences++;
+		}
+		return occurrences;
+	}
+
+	void invert() {
+		for (U i = 0; i < count / 2; i++) {
+			SWAP(data[i], data[count - i - 1]);
+		}
+	}
+
+	_FORCE_INLINE_ void clear() { resize(0); }
+	_FORCE_INLINE_ void reset() {
+		clear();
+		if (data) {
+			memfree(data);
+			data = nullptr;
+			capacity = 0;
+		}
+	}
+	_FORCE_INLINE_ bool empty() const { return count == 0; }
+	_FORCE_INLINE_ U get_capacity() const { return capacity; }
+	_FORCE_INLINE_ void reserve(U p_size) {
+		if (p_size > capacity) {
+			capacity = p_size;
+			data = (T *)memrealloc(data, capacity * sizeof(T));
+			CRASH_COND_MSG(!data, "Out of memory");
+		}
+	}
+
+	_FORCE_INLINE_ U size() const { return count; }
+	void resize(U p_size) {
+		if (p_size < count) {
+			if (!__has_trivial_destructor(T) && !force_trivial) {
+				for (U i = p_size; i < count; i++) {
+					data[i].~T();
+				}
+			}
+			count = p_size;
+		} else if (p_size > count) {
+			if (unlikely(p_size > capacity)) {
+				if (capacity == 0) {
+					capacity = 1;
+				}
+				while (capacity < p_size) {
+					capacity <<= 1;
+				}
+				data = (T *)memrealloc(data, capacity * sizeof(T));
+				CRASH_COND_MSG(!data, "Out of memory");
+			}
+			if (!__has_trivial_constructor(T) && !force_trivial) {
+				for (U i = count; i < p_size; i++) {
+					memnew_placement(&data[i], T);
+				}
+			}
+			count = p_size;
+		}
+	}
+	_FORCE_INLINE_ const T &operator[](U p_index) const {
+		CRASH_BAD_UNSIGNED_INDEX(p_index, count);
+		return data[p_index];
+	}
+	_FORCE_INLINE_ T &operator[](U p_index) {
+		CRASH_BAD_UNSIGNED_INDEX(p_index, count);
+		return data[p_index];
+	}
+
+	void fill(T p_val) {
+		for (U i = 0; i < count; i++) {
+			data[i] = p_val;
+		}
+	}
+
+	void insert(U p_pos, T p_val) {
+		ERR_FAIL_UNSIGNED_INDEX(p_pos, count + 1);
+		if (p_pos == count) {
+			push_back(p_val);
+		} else {
+			resize(count + 1);
+			for (U i = count - 1; i > p_pos; i--) {
+				data[i] = data[i - 1];
+			}
+			data[p_pos] = p_val;
+		}
+	}
+
+	int64_t find(const T &p_val, U p_from = 0) const {
+		for (U i = p_from; i < count; i++) {
+			if (data[i] == p_val) {
+				return int64_t(i);
+			}
+		}
+		return -1;
+	}
+
+	template <class C>
+	void sort_custom() {
+		U len = count;
+		if (len == 0) {
+			return;
+		}
+
+		SortArray<T, C> sorter;
+		sorter.sort(data, len);
+	}
+
+	void sort() {
+		sort_custom<_DefaultComparator<T>>();
+	}
+
+	void ordered_insert(T p_val) {
+		U i;
+		for (i = 0; i < count; i++) {
+			if (p_val < data[i]) {
+				break;
+			}
+		}
+		insert(i, p_val);
+	}
+
+	operator Vector<T>() const {
+		Vector<T> ret;
+		ret.resize(size());
+		T *w = ret.ptrw();
+		memcpy(w, data, sizeof(T) * count);
+		return ret;
+	}
+
+	operator PoolVector<T>() const {
+		PoolVector<T> pl;
+		if (size()) {
+			pl.resize(size());
+			typename PoolVector<T>::Write w = pl.write();
+			T *dest = w.ptr();
+			memcpy(dest, data, sizeof(T) * count);
+		}
+		return pl;
+	}
+
+	Vector<uint8_t> to_byte_array() const { //useful to pass stuff to gpu or variant
+		Vector<uint8_t> ret;
+		ret.resize(count * sizeof(T));
+		uint8_t *w = ret.ptrw();
+		memcpy(w, data, sizeof(T) * count);
+		return ret;
+	}
+
+	_FORCE_INLINE_ TightLocalVector() {}
+	_FORCE_INLINE_ TightLocalVector(const TightLocalVector &p_from) {
+		resize(p_from.size());
+		for (U i = 0; i < p_from.count; i++) {
+			data[i] = p_from.data[i];
+		}
+	}
+	TightLocalVector(const Vector<T> &p_from) {
+		resize(p_from.size());
+		for (U i = 0; i < count; i++) {
+			data[i] = p_from[i];
+		}
+	}
+	TightLocalVector(const PoolVector<T> &p_from) {
+		resize(p_from.size());
+		typename PoolVector<T>::Read r = p_from.read();
+		for (U i = 0; i < count; i++) {
+			data[i] = r[i];
+		}
+	}
+
+	inline TightLocalVector &operator=(const TightLocalVector &p_from) {
+		resize(p_from.size());
+		for (U i = 0; i < p_from.count; i++) {
+			data[i] = p_from.data[i];
+		}
+		return *this;
+	}
+	inline TightLocalVector &operator=(const Vector<T> &p_from) {
+		resize(p_from.size());
+		for (U i = 0; i < count; i++) {
+			data[i] = p_from[i];
+		}
+		return *this;
+	}
+	inline TightLocalVector &operator=(const PoolVector<T> &p_from) {
+		resize(p_from.size());
+		typename PoolVector<T>::Read r = p_from.read();
+		for (U i = 0; i < count; i++) {
+			data[i] = r[i];
+		}
+		return *this;
+	}
+
+	_FORCE_INLINE_ ~TightLocalVector() {
+		if (data) {
+			reset();
+		}
+	}
+};
+
+// Integer default version
+template <class T, class U = int32_t, bool force_trivial = false>
+class TightLocalVectori : public TightLocalVector<T, U, force_trivial> {
+};
+
+#endif // TIGHT_LOCAL_VECTOR_H
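
For reference, a minimal usage sketch (not part of the patch itself). It
assumes an engine build where core/containers/tight_local_vector.h is on the
include path; the function name tight_local_vector_demo is hypothetical and
only illustrates the exact-capacity reserve() and the swap-with-last
remove_unordered() semantics described above.

	#include "core/containers/tight_local_vector.h"
	#include "core/containers/vector.h"

	void tight_local_vector_demo() {
		TightLocalVector<int> v;

		// reserve() allocates exactly the requested capacity; the vanilla
		// LocalVector would round 100 up to the next power of two (128).
		v.reserve(100); // v.get_capacity() == 100

		for (int i = 0; i < 5; i++) {
			v.push_back(i); // v is {0, 1, 2, 3, 4}
		}

		// remove_unordered() overwrites the removed slot with the last
		// element instead of shifting the tail, so order is not preserved.
		v.remove_unordered(1); // v is {0, 4, 2, 3}

		// remove() shifts the tail down by one and preserves order.
		v.remove(1); // v is {0, 2, 3}

		// The conversion operator copies the contents into the engine's
		// copy-on-write Vector.
		Vector<int> copy = v;
	}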