From 486eec36ee5ce73c8ebae9b55b3a36fdf3178011 Mon Sep 17 00:00:00 2001 From: Ben Craig Date: Thu, 19 Dec 2024 16:25:48 -0500 Subject: [PATCH 1/3] Allocator redesign Switch to an allocator model similar to the STL. This required updating List, Vector, and SharedPtr, the current allocator clients. Users of allocators should generally indirect their operations through AllocTraits. This allows user-provided allocators to provide a minimal set of basis operations (AllocBytes + FreeBytes, or Alloc + Free), while letting the remainder of the operations be given sane defaults. This PR's approach to allocators deviates from the STL, in that allocator types aren't required to be templated on the value type. This makes allocator implementations more approachable, avoids the need to rebind, and can even help build throughput due to fewer template instantiations. This PR also provides StdAllocator, a radiant allocator that forwards to std::allocator. This both provides an example of an allocator with all the features turned on, as well as providing a path to constexpr containers, if we want to go down that path. There are multiple test allocators (like Mallocator) that demonstrate the minimal interface. There are some drive-by fixes / changes to List and Vector. There were missing self-assignment / self-splicing checks beforehand. Vector also didn't have a move ctor, but did have an allocator move ctor. I also fixed one of the many FalseTypes. Useful background reading on implementing allocators: https://thephd.dev/allocator-hell-small_bit_vector .
--- radiant/List.h | 148 +++++-- radiant/Memory.h | 501 ++++++++++++++++----- radiant/SharedPtr.h | 41 +- radiant/TotallyRad.h | 19 - radiant/TypeTraits.h | 2 +- radiant/Vector.h | 63 ++- radiant/detail/VectorOperations.h | 68 ++- test/TestAlloc.cpp | 104 +---- test/TestAlloc.h | 711 +++++++++++------------------- test/test_Allocators.cpp | 372 ++++++++++++++++ test/test_EmptyOptimizedPair.cpp | 12 +- test/test_List.cpp | 331 ++++++++++++-- test/test_SharedPtr.cpp | 98 ++-- test/test_Vector.cpp | 352 +++++++++++++-- 14 files changed, 1939 insertions(+), 883 deletions(-) create mode 100644 test/test_Allocators.cpp diff --git a/radiant/List.h b/radiant/List.h index 1b6bb99..565df84 100644 --- a/radiant/List.h +++ b/radiant/List.h @@ -104,14 +104,7 @@ class List using ReverseIteratorType = ReverseIterator; using ConstReverseIteratorType = ReverseIterator; - using Rebound = - typename TAllocator::template Rebind<::rad::detail::ListNode>::Other; - - // Not asserting noexcept movability on T, as we mostly don't need to move - // types once they are constructed. We support immovable types like - // mutexes. The Take* functions assert on noexcept movability since they - // do move contained elements. - RAD_S_ASSERT_ALLOCATOR_REQUIRES_T(TAllocator); + using AllocatorTraits = AllocTraits; ~List() { @@ -122,15 +115,29 @@ class List RAD_NOT_COPYABLE(List); List(List&& x) noexcept - : m_storage(::rad::Move(x.m_storage.First())) + : m_storage(x.m_storage.First()) { m_storage.Second().Swap(x.m_storage.Second()); } List& operator=(List&& x) noexcept { + // Don't allow non-propagation of allocators + RAD_S_ASSERTMSG( + AllocatorTraits::IsAlwaysEqual || + AllocatorTraits::PropagateOnMoveAssignment, + "Cannot use move assignment with this allocator, as it could cause " + "copies. 
Either change allocators, or use something like Clone()."); + + if RAD_UNLIKELY (this == &x) + { + return *this; + } + Clear(); - Swap(x); + AllocatorTraits::PropagateOnMoveIfNeeded(m_storage.First(), + x.m_storage.First()); + m_storage.Second().Swap(x.m_storage.Second()); return *this; } @@ -141,7 +148,7 @@ class List Res Clone() { - List local(m_storage.First()); + List local(AllocatorTraits::SelectAllocOnCopy(m_storage.First())); return local.AssignSomeImpl(this->begin(), this->end()) .OnOk(::rad::Move(local)); } @@ -468,8 +475,8 @@ class List ::rad::detail::ListNode* typed = static_cast<::rad::detail::ListNode*>(cur); - typed->~ListNode(); - ReboundAlloc().Free(typed); + AllocatorTraits::Destroy(m_storage.First(), typed); + AllocatorTraits::Free(m_storage.First(), typed, 1); return IteratorType(retnode); } @@ -493,8 +500,8 @@ class List static_cast<::rad::detail::ListNode*>(cur); cur = cur->m_next; - typed->~ListNode(); - ReboundAlloc().Free(typed); + AllocatorTraits::Destroy(m_storage.First(), typed); + AllocatorTraits::Free(m_storage.First(), typed, 1); } return IteratorType(end); } @@ -540,66 +547,143 @@ class List List& Swap(List& x) noexcept { - { - rad::Swap(m_storage.First(), x.m_storage.First()); - } + // Don't allow non-propagation of allocators + RAD_S_ASSERTMSG( + AllocatorTraits::IsAlwaysEqual || AllocatorTraits::PropagateOnSwap, + "Cannot use Swap with this allocator, as it could cause copies. " + "Either change allocators, or use move construction."); + + AllocatorTraits::PropagateOnSwapIfNeeded(m_storage.First(), + x.m_storage.First()); m_storage.Second().Swap(x.m_storage.Second()); return *this; } // The list parameter to the splice functions is mostly unused. It's // important to keep it though as a way to attest that you have mutable - // access to the source list. If we want to support unequal allocators, - // then we'll need access to the source list. We'll also need to add an - // error channel if we support unequal allocators. 
+ // access to the source list. + // Self-splicing is erroneous behavior, with a fallback behavior of no-op. List& SpliceAll(ConstIteratorType position, List& x) { + RAD_S_ASSERTMSG( + AllocatorTraits::IsAlwaysEqual, + "Cannot use SpliceAll with potentially unequal allocators. " + "Either change allocators, or use Insert()."); + + if (&x == this) + { + RAD_ASSERT(false); // "You cannot splice a list into itself." + return *this; + } + m_storage.Second().SpliceSome(position.m_node, x.begin().m_node, x.end().m_node); return *this; } + // Self-splicing is erroneous behavior, with a fallback behavior of no-op. List& SpliceAll(ConstIteratorType position, List&& x) { + RAD_S_ASSERTMSG( + AllocatorTraits::IsAlwaysEqual, + "Cannot use SpliceAll with potentially unequal allocators. " + "Either change allocators, or use Insert()."); + + if (&x == this) + { + RAD_ASSERT(false); // "You cannot splice a list into itself." + return *this; + } + m_storage.Second().SpliceSome(position.m_node, x.begin().m_node, x.end().m_node); return *this; } + // If `i` doesn't point inside `x`, the behavior is undefined. + // Self-splicing is erroneous behavior, with a fallback behavior of no-op. List& SpliceOne(ConstIteratorType position, List& x, ConstIteratorType i) { - RAD_UNUSED(x); + RAD_S_ASSERTMSG( + AllocatorTraits::IsAlwaysEqual, + "Cannot use SpliceOne with potentially unequal allocators. " + "Either change allocators, or use Insert()."); + + if (&x == this) + { + RAD_ASSERT(false); // "You cannot splice a list into itself." + return *this; + } + m_storage.Second().SpliceOne(position.m_node, i.m_node); return *this; } + // If `i` doesn't point inside `x`, the behavior is undefined. + // Self-splicing is erroneous behavior, with a fallback behavior of no-op. List& SpliceOne(ConstIteratorType position, List&& x, ConstIteratorType i) { - RAD_UNUSED(x); + RAD_S_ASSERTMSG( + AllocatorTraits::IsAlwaysEqual, + "Cannot use SpliceOne with potentially unequal allocators. 
" + "Either change allocators, or use Insert()."); + + if (&x == this) + { + RAD_ASSERT(false); // "You cannot splice a list into itself." + return *this; + } + m_storage.Second().SpliceOne(position.m_node, i.m_node); return *this; } + // If (`first`, `last`] isn't a valid range inside `x`, the behavior is + // undefined. Self-splicing is erroneous behavior, with a fallback behavior + // of no-op. List& SpliceSome(ConstIteratorType position, List& x, ConstIteratorType first, ConstIteratorType last) { - RAD_UNUSED(x); + RAD_S_ASSERTMSG( + AllocatorTraits::IsAlwaysEqual, + "Cannot use SpliceSome with potentially unequal allocators. " + "Either change allocators, or use Insert()."); + + if (&x == this) + { + RAD_ASSERT(false); // "You cannot splice a list into itself." + return *this; + } + m_storage.Second().SpliceSome(position.m_node, first.m_node, last.m_node); return *this; } + // If (`first`, `last`] isn't a valid range inside `x`, the behavior is + // undefined. Self-splicing is erroneous behavior, with a fallback behavior + // of no-op. List& SpliceSome(ConstIteratorType position, List&& x, ConstIteratorType first, ConstIteratorType last) { - RAD_UNUSED(x); + RAD_S_ASSERTMSG( + AllocatorTraits::IsAlwaysEqual, + "Cannot use SpliceSome with potentially unequal allocators. " + "Either change allocators, or use Insert()."); + + if (&x == this) + { + RAD_ASSERT(false); // "You cannot splice a list into itself." + return *this; + } + m_storage.Second().SpliceSome(position.m_node, first.m_node, last.m_node); @@ -636,14 +720,19 @@ class List RAD_NODISCARD ::rad::detail::ListNode* EmplacePtr( ConstIteratorType position, Args&&... 
args) { - ::rad::detail::ListNode* storage = ReboundAlloc().Alloc(1); + ::rad::detail::ListNode* storage = + AllocatorTraits::template Alloc<::rad::detail::ListNode>( + m_storage.First(), + 1); if (storage == nullptr) { return nullptr; } // forward to placement new ::rad::detail::ListNode* new_node = - new (storage)::rad::detail::ListNode(Forward(args)...); + AllocatorTraits::Construct(m_storage.First(), + storage, + Forward(args)...); // insert the new node before passed in position m_storage.Second().AttachNewNode(position.m_node, new_node); @@ -690,11 +779,6 @@ class List return *this; } - Rebound ReboundAlloc() - { - return m_storage.First(); - } - EmptyOptimizedPair m_storage; }; diff --git a/radiant/Memory.h b/radiant/Memory.h index e82a5a3..12663eb 100644 --- a/radiant/Memory.h +++ b/radiant/Memory.h @@ -15,7 +15,16 @@ #pragma once #include "radiant/TotallyRad.h" +#include "radiant/Algorithm.h" #include "radiant/TypeTraits.h" +#include "radiant/Utility.h" +#include "radiant/detail/Meta.h" + +#include + +#if RAD_ENABLE_STD +#include +#endif // // Users of Radiant may define their own default allocator. Radiant itself @@ -23,8 +32,8 @@ // enforce verbosity if they wish. // #ifdef RAD_DEFAULT_ALLOCATOR -#define RAD_ALLOCATOR(x) RAD_DEFAULT_ALLOCATOR -#define RAD_ALLOCATOR_EQ(x) = RAD_DEFAULT_ALLOCATOR +#define RAD_ALLOCATOR(x) RAD_DEFAULT_ALLOCATOR +#define RAD_ALLOCATOR_EQ(x) = RAD_DEFAULT_ALLOCATOR #else #define RAD_ALLOCATOR(x) #define RAD_ALLOCATOR_EQ(x) @@ -33,110 +42,400 @@ namespace rad { -#if 0 -/// @brief Example Radiant compatible allocator. -/// @details Note that this allocator has no implementation and exists only as -/// an example for allocator implementors. Consider it a concept contract for an -/// allocator compatible with Radiant. -/// @tparam T The type of object to allocate. 
+namespace detection +{ + +// helper macro for stamping out traits that default to false +#define RAD_TRAIT_DETECTOR(Trait) \ + template \ + struct Trait \ + { \ + static constexpr bool Val = false; \ + }; \ + \ + template \ + struct Trait> \ + { \ + static constexpr bool Val = T::Trait; \ + } + +RAD_TRAIT_DETECTOR(PropagateOnCopy); +RAD_TRAIT_DETECTOR(PropagateOnMoveAssignment); +RAD_TRAIT_DETECTOR(PropagateOnSwap); +RAD_TRAIT_DETECTOR(HasConstructAndDestroy); +RAD_TRAIT_DETECTOR(HasTypedAllocations); + +#undef RAD_TRAIT_DETECTOR + +// If your class is empty, then you can use the default of "True" for +// IsAlwaysEqual. Everyone else needs to define IsAlwaysEqual in their class. +template +struct IsAlwaysEqual +{ + RAD_S_ASSERTMSG(IsEmpty, + "Non-empty Allocators must define IsAlwaysEqual"); + static constexpr bool Val = true; +}; + template -class Allocator +struct IsAlwaysEqual> +{ + static constexpr bool Val = T::IsAlwaysEqual; +}; + +} // namespace detection + +template +class AllocTraits { public: - /// @brief Trait indicating if freeing memory is required. - /// @details If this is false, users of the allocator need not free memory - /// that was allocated. This trait enables certain run or compile time - /// optimizations. An example of an allocator that might leverage this trait - /// is a stack allocator. When false, an allocator implementor may implement - /// a no-op Free function. - static constexpr bool NeedsFree = true; - - /// @brief Trait indicating if the allocator supports reallocations. - /// @details If this is false, users of the allocator should not call - /// Realloc as the allocator does not support it. In some scenarios it may - /// be possible and more efficient to reallocate memory in place rather than - /// allocating new memory. When false, an allocator implementor may - /// implement a no-op Realloc function. - static constexpr bool HasRealloc = true; - - /// @brief Trait indicating if the allocator supports allocating by bytes. 
- /// @details If this is true, users of the allocator may call the Bytes - /// suffixed functions to allocate and free memory as bytes rather than as - /// number of T elements. An example is cases where some memory needs - /// allocated before or after T. However it may not be possible for all types - /// of allocators, such as a slab/lookaside allocator. When false, an - /// allocator may implement appropriate no-op functions for the various Bytes - /// suffixes function. - static constexpr bool HasAllocBytes = true; - - using ThisType = Allocator; - using ValueType = T; - using SizeType = uint32_t; - using DifferenceType = ptrdiff_t; - - ~Allocator() = default; - - constexpr Allocator() noexcept = default; - - constexpr Allocator(const Allocator&) noexcept = default; - - template - constexpr Allocator(const Allocator&) noexcept - { - } - - template - struct Rebind - { - using Other = Allocator; - }; - - /// @brief Frees memory allocated by Alloc. - /// @param ptr Pointer to the memory to free. - void Free(ValueType* ptr) noexcept; - - /// @brief Allocates memory for count number of T. - /// @param count The number of T to allocate memory for. - /// @return Pointer to the allocated memory. - ValueType* Alloc(SizeType count); - - /// @brief Reallocates memory for count number of T. - /// @param ptr Pointer to the memory to reallocate. If nullptr a new memory block is allocated. - /// @param count The number of T to allocate memory for. - /// @return Pointer to the reallocated memory. - ValueType* Realloc(ValueType* ptr, SizeType count); - - /// @brief Frees memory allocated by AllocBytes. - /// @param ptr Pointer to the memory to free. - void FreeBytes(void* ptr) noexcept; - - /// @brief Allocates memory for size number of bytes. - /// @param size The number of bytes to allocate memory for. - /// @return Pointer to the allocated memory. - void* AllocBytes(SizeType size); - - /// @brief Reallocates memory for size number of bytes. 
- /// @param ptr Pointer to the memory to reallocate. If nullptr a new memory block is allocated. - /// @param size The number of bytes to allocate memory for. - /// @return Pointer to the reallocated memory. - void* ReallocBytes(void* ptr, SizeType size); + static constexpr bool PropagateOnCopy = + detection::PropagateOnCopy::Val; // defaults to false + static constexpr bool PropagateOnMoveAssignment = + detection::PropagateOnMoveAssignment::Val; // defaults to false + static constexpr bool PropagateOnSwap = + detection::PropagateOnSwap::Val; // defaults to false + static constexpr bool IsAlwaysEqual = + detection::IsAlwaysEqual::Val; // defaults to true when is_empty + static constexpr bool HasConstructAndDestroy = + detection::HasConstructAndDestroy::Val; // defaults to false + static constexpr bool HasTypedAllocations = + detection::HasTypedAllocations::Val; // defaults to false + + static constexpr size_t MaxSize = ~size_t(0); + + // basis operations + static void* AllocBytes(AllocT& a, size_t size) + { + return a.AllocBytes(size); + } + + static void FreeBytes(AllocT& a, void* ptr, size_t size) noexcept + { + RAD_S_ASSERTMSG(noexcept(a.FreeBytes(ptr, size)), + "Allocator::FreeBytes must be noexcept"); + a.FreeBytes(ptr, size); + } + + // typed operations + template + static constexpr T* Alloc(AllocT& a, size_t n) + { + return AllocImpl(IntegralConstant{}, + a, + n); + } + + template + static constexpr void Free(AllocT& a, T* p, size_t n) noexcept + { + FreeImpl(IntegralConstant{}, a, p, n); + } + + // constexpr construct and destroy operations + template + static constexpr T* Construct(AllocT& a, T* p, Args&&... 
args) + { + return ConstructImpl(IntegralConstant{}, + a, + p, + Forward(args)...); + } + + template + static constexpr void Destroy(AllocT& a, T* p) noexcept + { + DestroyImpl(IntegralConstant{}, a, p); + } + + static constexpr bool Equal(const AllocT& a, const AllocT& b) noexcept + { + return EqualImpl(IntegralConstant{}, a, b); + } + + static constexpr void PropagateOnMoveIfNeeded(AllocT& dest, + AllocT& src) noexcept + { + return PropagateIfNeededImpl(IntegralConstant < bool, + PropagateOnMoveAssignment && + !IsAlwaysEqual > {}, + dest, + src); + } + + static constexpr void PropagateOnCopyIfNeeded(AllocT& a, AllocT& b) noexcept + { + return PropagateIfNeededImpl(IntegralConstant < bool, + PropagateOnCopy && !IsAlwaysEqual > {}, + a, + b); + } + + static constexpr void PropagateOnSwapIfNeeded(AllocT& a, AllocT& b) noexcept + { + return PropagateOnSwapIfNeededImpl(IntegralConstant < bool, + PropagateOnSwap && + !IsAlwaysEqual > {}, + a, + b); + } + + static constexpr AllocT SelectAllocOnCopy(const AllocT& src_alloc) noexcept + { + RAD_S_ASSERTMSG(PropagateOnCopy || IsAlwaysEqual || + IsDefaultCtor, + "You are attempting to copy / clone a container, but " + "your allocator doesn't want to PropagateOnCopy, and " + "we can't default construct your allocator."); + return SelectAllocOnCopyImpl(IntegralConstant < bool, + IsAlwaysEqual || PropagateOnCopy > {}, + src_alloc); + } + +private: + + // static assert the primary requirements. There are additional + // requirements in some of the customization points, and we static assert + // if you provide the customization without meeting the additional + // requirements. 
+ RAD_S_ASSERTMSG(IsNoThrowDtor // + && IsNoThrowCopyCtor // + && IsNoThrowCopyAssign // + && IsNoThrowMoveCtor // + && IsNoThrowMoveAssign, + "Allocator requirements not met"); + + template + static constexpr T* AllocImpl(TrueType, // HasTypedAllocations + AllocT& a, + size_t n) + { + return a.template Alloc(n); + } + + template + static constexpr T* AllocImpl(FalseType, // !HasTypedAllocations + AllocT& a, + size_t n) + { + if (n > MaxSize / sizeof(T)) + { + // If you want to keep returning nullptr, make your + // HandleSizeOverflow a no-op. If you want an exception or an + // assertion, then do that instead. + a.HandleSizeOverflow(); + return nullptr; + } + + void* mem = a.AllocBytes(n * sizeof(T)); + return static_cast(mem); + } + + template + static constexpr void FreeImpl(TrueType, // HasTypedAllocations + AllocT& a, + T* p, + size_t n) noexcept + { + RAD_S_ASSERTMSG(noexcept(a.Free(p, n)), + "Allocator::Free must be noexcept"); + return a.Free(p, n); + } + + template + static constexpr void FreeImpl(FalseType, // !HasTypedAllocations + AllocT& a, + T* p, + size_t n) noexcept + { + RAD_FAST_FAIL((p == nullptr) || (n <= MaxSize / sizeof(T))); + a.FreeBytes(p, n * sizeof(T)); + } + + template + static constexpr T* ConstructImpl(TrueType, // HasConstructAndDestroy + AllocT& a, + T* p, + Args&&... args) + { + return a.Construct(p, Forward(args)...); + } + + template + static constexpr T* ConstructImpl(FalseType, // !HasConstructAndDestroy + AllocT& a, + T* p, + Args&&... 
args) + { + RAD_UNUSED(a); + return ::new (static_cast(p)) T(Forward(args)...); + } + + template + static constexpr void DestroyImpl(TrueType, // HasConstructAndDestroy + AllocT& a, + T* p) noexcept + { + RAD_S_ASSERTMSG(noexcept(a.Destroy(p)), + "Allocator::Destroy must be noexcept"); + a.Destroy(p); + } + + template + static constexpr void DestroyImpl(FalseType, // !HasConstructAndDestroy + AllocT& a, + T* p) noexcept + { + RAD_UNUSED(a); + p->~T(); + } + + static constexpr bool EqualImpl(TrueType, // IsAlwaysEqual + const AllocT& a, + const AllocT& b) noexcept + { + RAD_UNUSED(a); + RAD_UNUSED(b); + return true; + } + + static constexpr bool EqualImpl(FalseType, // !IsAlwaysEqual + const AllocT& a, + const AllocT& b) noexcept + { + RAD_S_ASSERTMSG(noexcept(a == b), + "Allocator::operator== must be noexcept"); + return a == b; + } + + static constexpr void PropagateIfNeededImpl( + TrueType, // PropagateOn* && !IsAlwaysEqual + AllocT& dest, + AllocT& src) noexcept + { + if (!(dest == src)) // avoid using !=, as AllocT may not define it + { + dest = src; + } + } + + static constexpr void PropagateIfNeededImpl( + FalseType, // ! (PropagateOn* && !IsAlwaysEqual) + AllocT& dest, + AllocT& src) noexcept + { + RAD_UNUSED(dest); + RAD_UNUSED(src); + } + + static constexpr void PropagateOnSwapIfNeededImpl( + TrueType, // PropagateOnSwap && !IsAlwaysEqual + AllocT& a, + AllocT& b) noexcept + { + using rad::Swap; + Swap(a, b); + } + + static constexpr void PropagateOnSwapIfNeededImpl( + FalseType, // ! 
(PropagateOnSwap && !IsAlwaysEqual) + AllocT& a, + AllocT& b) noexcept + { + RAD_UNUSED(a); + RAD_UNUSED(b); + } + + static constexpr AllocT SelectAllocOnCopyImpl( + TrueType, // IsAlwaysEqual || PropagateOnCopy + const AllocT& src_alloc) noexcept + { + return src_alloc; + } + + static constexpr AllocT SelectAllocOnCopyImpl( + FalseType, // !(IsAlwaysEqual || PropagateOnCopy) + const AllocT& src_alloc) noexcept + { + RAD_UNUSED(src_alloc); + return AllocT(); + } }; -#endif -// -// Radiant allocator concept requires: -// - Destruction does not throw. -// - Copying does not throw. -// - Moving does not throw. -// - Freeing memory does not throw. -// -template -RAD_INLINE_VAR constexpr bool AllocatorRequires = - (IsNoThrowDtor && // - IsNoThrowCopyCtor && IsNoThrowCopyAssign && // - IsNoThrowMoveCtor && IsNoThrowMoveAssign && - noexcept(DeclVal().Free(nullptr)) && - noexcept(DeclVal().FreeBytes(nullptr))); +#if RAD_ENABLE_STD && RAD_USER_MODE +class StdAllocator +{ +public: + + static constexpr bool PropagateOnCopy = false; + static constexpr bool PropagateOnMoveAssignment = false; + static constexpr bool PropagateOnSwap = false; + static constexpr bool IsAlwaysEqual = true; + + static constexpr bool HasTypedAllocations = true; + static constexpr bool HasConstructAndDestroy = true; + + constexpr bool operator==(const StdAllocator& /*rhs*/) const noexcept + { + return true; + } + + constexpr bool operator!=(const StdAllocator& /*rhs*/) const noexcept + { + return false; + } + + // Allocate and free are intentionally not constexpr. Constexpr evaluation + // requires typed parameters. 
+ static void* AllocBytes(size_t size) + { + std::allocator al; + return al.allocate(size); + } + + static void FreeBytes(void* ptr, size_t size) noexcept + { + std::allocator al; + return al.deallocate(static_cast(ptr), size); + } + + template + static constexpr T* Alloc(size_t count) + { + std::allocator al; + return al.allocate(count); + } + + template + static constexpr void Free(T* ptr, size_t count) noexcept + { + std::allocator al; + return al.deallocate(ptr, count); + } + + template + static constexpr T* Construct(T* p, Args&&... args) + { + // use allocator_traits, because std::allocator::construct was removed + // in C++20, and construct_at wasn't added until C++20. + std::allocator al; + std::allocator_traits>::construct( + al, + p, + Forward(args)...); + return p; + } + + template + static constexpr void Destroy(T* p) noexcept + { + // use allocator_traits, because std::allocator::destroy was removed in + // C++20, and destroy_at wasn't added until C++17 + std::allocator al; + std::allocator_traits>::destroy(al, p); + } +}; +#endif // RAD_ENABLE_STD && RAD_USER_MODE ^^^ } // namespace rad diff --git a/radiant/SharedPtr.h b/radiant/SharedPtr.h index 4edf3ee..ee20e71 100644 --- a/radiant/SharedPtr.h +++ b/radiant/SharedPtr.h @@ -179,14 +179,16 @@ class PtrBlockBase template class PtrBlock final : public PtrBlockBase { +private: + + using AllocatorTraits = AllocTraits; + public: - using AllocatorType = typename TAlloc::template Rebind::Other; + using AllocatorType = TAlloc; using ValueType = T; using PairType = EmptyOptimizedPair; - RAD_S_ASSERT_ALLOCATOR_REQUIRES_T(TAlloc); - template PtrBlock(const AllocatorType& alloc, TArgs&&... args) noexcept( noexcept(PairType(alloc, Forward(args)...))) @@ -207,6 +209,8 @@ class PtrBlock final : public PtrBlockBase void OnWeakZero() const noexcept override { + // + // Do the moral equivalent of `delete this`. // // Take a copy of the allocator first, this will be used to do the free. 
// Then destruct the remaining parts of the block. @@ -220,7 +224,7 @@ class PtrBlock final : public PtrBlockBase AllocatorType alloc(Allocator()); auto self = const_cast(this); Allocator().~AllocatorType(); - alloc.Free(self); + AllocatorTraits::Free(alloc, self, 1); } AllocatorType& Allocator() noexcept @@ -904,43 +908,36 @@ namespace detail struct AllocateSharedImpl { /// @brief RAII-safety wrapper helper - template + template struct AllocateSharedHelper { - constexpr AllocateSharedHelper(TAlloc& ta) noexcept + constexpr AllocateSharedHelper(const TAlloc& ta) noexcept : alloc(ta), block(nullptr) { + block = AllocTraits::template Alloc(alloc, 1); } ~AllocateSharedHelper() { - if (block) - { - alloc.Free(block); - } + AllocTraits::Free(alloc, block, 1); } - TAlloc& alloc; - typename TAlloc::ValueType* block; + TAlloc alloc; + BlockType* block = nullptr; }; template static inline SharedPtr AllocateShared(const TAlloc& alloc, - TArgs&&... args) // - noexcept(noexcept(DeclVal::AllocatorType>() - .Alloc(1)) && - IsNoThrowCtor) + TArgs&&... args) { using BlockType = PtrBlock; - typename BlockType::AllocatorType blockAlloc(alloc); - AllocateSharedHelper excSafe(blockAlloc); + AllocateSharedHelper excSafe(alloc); - excSafe.block = blockAlloc.Alloc(1); if RAD_LIKELY (excSafe.block != nullptr) { - new (excSafe.block) BlockType(blockAlloc, Forward(args)...); + new (excSafe.block) BlockType(alloc, Forward(args)...); auto block = excSafe.block; excSafe.block = nullptr; return SharedPtr(block, &block->Value()); @@ -963,8 +960,6 @@ SharedPtr AllocateShared(const TAlloc& alloc, TArgs&&... args) // noexcept(noexcept(detail::AllocateSharedImpl::AllocateShared( alloc, Forward(args)...))) { - RAD_S_ASSERT_ALLOCATOR_REQUIRES_T(TAlloc); - return detail::AllocateSharedImpl::AllocateShared( alloc, Forward(args)...); @@ -981,8 +976,6 @@ template SharedPtr MakeShared(TArgs&&... 
args) noexcept( noexcept(AllocateShared(DeclVal(), Forward(args)...))) { - RAD_S_ASSERT_ALLOCATOR_REQUIRES_T(TAlloc); - TAlloc alloc; return AllocateShared(alloc, Forward(args)...); } diff --git a/radiant/TotallyRad.h b/radiant/TotallyRad.h index ab8c35b..18bff7f 100644 --- a/radiant/TotallyRad.h +++ b/radiant/TotallyRad.h @@ -296,25 +296,6 @@ extern "C" __declspec(noreturn) void __fastfail(unsigned int code); #define RAD_S_ASSERT_NOTHROW_MOVE_T(x) RAD_S_ASSERT(true) #endif -// -// Enables assertions that allocators meet the Radiant allocator concept -// requirements. -// -// See: rad::AllocatorRequires -// -#ifndef RAD_ENABLE_ALLOCATOR_REQUIRES_ASSERTIONS -#define RAD_ENABLE_ALLOCATOR_REQUIRES_ASSERTIONS 1 -#endif -#if RAD_ENABLE_ALLOCATOR_REQUIRES_ASSERTIONS -#define RAD_S_ASSERT_ALLOCATOR_REQUIRES(x) \ - RAD_S_ASSERTMSG(x, "allocator requirements not met") -#define RAD_S_ASSERT_ALLOCATOR_REQUIRES_T(x) \ - RAD_S_ASSERT_ALLOCATOR_REQUIRES(::rad::AllocatorRequires) -#else -#define RAD_S_ASSERT_ALLOCATOR_REQUIRES(x) RAD_S_ASSERT(true) -#define RAD_S_ASSERT_ALLOCATOR_REQUIRES_T(x) RAD_S_ASSERT(true) -#endif - #define RAD_NOT_COPYABLE(x) \ x(x const&) = delete; \ x& operator=(x const&) = delete diff --git a/radiant/TypeTraits.h b/radiant/TypeTraits.h index 4d7ed63..0a7772b 100644 --- a/radiant/TypeTraits.h +++ b/radiant/TypeTraits.h @@ -129,7 +129,7 @@ struct EnIfUnrelated : enable_if> template using IntegralConstant = integral_constant; using TrueType = IntegralConstant; -using FalseType = IntegralConstant; +using FalseType = IntegralConstant; template RAD_INLINE_VAR constexpr bool IsIntegral = is_integral::value; diff --git a/radiant/Vector.h b/radiant/Vector.h index a4ff3ba..2a47591 100644 --- a/radiant/Vector.h +++ b/radiant/Vector.h @@ -43,6 +43,7 @@ class Vector final using OperationalType = detail::VectorOperations; using StorageType = EmptyOptimizedPair; + using AllocatorTraits = AllocTraits; public: @@ -55,7 +56,6 @@ class Vector final using OtherType = 
Vector; RAD_S_ASSERT_NOTHROW_MOVE_T(T); - RAD_S_ASSERT_ALLOCATOR_REQUIRES_T(TAllocator); RAD_NOT_COPYABLE(Vector); @@ -78,11 +78,12 @@ class Vector final { } - /// @brief Constructs empty container with move-constructed allocator. - /// @param alloc Allocator to move. - explicit Vector(AllocatorType&& alloc) noexcept - : m_storage(Forward(alloc)) + /// @brief Move constructs container from another. + /// @param other Container to steal from. + explicit Vector(ThisType&& other) noexcept + : m_storage(other.Allocator()) { + other.Storage().Move(Allocator(), Storage()); } /// @brief Moves elements in another container into this. @@ -90,7 +91,21 @@ class Vector final /// @return Reference to this container. ThisType& operator=(ThisType&& other) noexcept { - other.Storage().Move(other.Allocator(), Storage()); + // Don't allow non-propagation of allocators + RAD_S_ASSERTMSG( + AllocatorTraits::IsAlwaysEqual || + AllocatorTraits::PropagateOnMoveAssignment, + "Cannot use move assignment with this allocator, as it could cause " + "copies. Either change allocators, or use something like Copy()."); + + if RAD_UNLIKELY (this == &other) + { + return *this; + } + + other.Storage().Move(Allocator(), Storage()); + AllocatorTraits::PropagateOnMoveIfNeeded(Allocator(), + other.Allocator()); return *this; } @@ -238,7 +253,15 @@ class Vector final /// @return Reference to this container. ThisType& Swap(ThisType& other) noexcept { + // Don't allow non-propagation of allocators + RAD_S_ASSERTMSG( + AllocatorTraits::IsAlwaysEqual || AllocatorTraits::PropagateOnSwap, + "Cannot use Swap with this allocator, as it could cause copies. " + "Either change allocators, or use move construction."); + Storage().Swap(other.Storage()); + AllocatorTraits::PropagateOnSwapIfNeeded(Allocator(), + other.Allocator()); return *this; } @@ -367,6 +390,14 @@ class Vector final return Span(); } + /// @brief Create a copy of the current Vector + /// @return The new container on success or an error. 
+ Res Clone() + { + ThisType local(AllocatorTraits::SelectAllocOnCopy(Allocator())); + return local.Assign(ToSpan()).OnOk(::rad::Move(local)); + } + /// @brief Copies the elements in this container to another. /// @param to Container to copy elements to. /// @return Result reference to this container on success or an error. @@ -376,7 +407,15 @@ class Vector final // the storage and allocator are private when the from and to vectors // are not exactly the same type. This problem can be solved, but // requires a bit of work. - return Storage().Copy(to.Allocator(), to.Storage()).OnOk(*this); + auto res = Storage() + .Copy(Allocator(), to.Storage(), to.Allocator()) + .OnOk(*this); + if (res.IsOk()) + { + AllocatorTraits::PropagateOnCopyIfNeeded(to.Allocator(), + Allocator()); + } + return res; } /// @brief Moves the elements in this container to another. @@ -384,7 +423,15 @@ class Vector final /// @return Result reference to this container on success or an error. ThisType& Move(ThisType& to) noexcept { - Storage().Move(Allocator(), to.Storage()); + // Don't allow non-propagation of allocators + RAD_S_ASSERTMSG( + AllocatorTraits::IsAlwaysEqual || + AllocatorTraits::PropagateOnMoveAssignment, + "Cannot use move assignment with this allocator, as it could cause " + "copies. 
Either change allocators, or use something like Copy()."); + + Storage().Move(to.Allocator(), to.Storage()); + AllocatorTraits::PropagateOnMoveIfNeeded(to.Allocator(), Allocator()); return *this; } diff --git a/radiant/detail/VectorOperations.h b/radiant/detail/VectorOperations.h index f86f83a..0f99374 100644 --- a/radiant/detail/VectorOperations.h +++ b/radiant/detail/VectorOperations.h @@ -17,6 +17,7 @@ #include "radiant/TotallyRad.h" #include "radiant/Algorithm.h" // NOLINT(misc-include-cleaner) #include "radiant/Iterator.h" +#include "radiant/Memory.h" #include "radiant/Res.h" #include "radiant/Span.h" #include "radiant/TypeTraits.h" @@ -34,12 +35,14 @@ struct VectorAlloc { public: + using AllocatorTraits = AllocTraits; + ~VectorAlloc() { if (buffer) { Clear(); - allocator.Free(buffer); + AllocatorTraits::Free(allocator, buffer, capacity); } } @@ -57,7 +60,7 @@ struct VectorAlloc { RAD_ASSERT(buffer == nullptr); - buffer = allocator.Alloc(count); + buffer = AllocatorTraits::template Alloc(allocator, count); if (!buffer) { return false; @@ -384,10 +387,7 @@ struct VectorStorage template void Free(TAllocator& alloc) noexcept { - if (m_data) - { - alloc.Free(m_data); - } + AllocTraits::Free(alloc, m_data, m_capacity); } template @@ -486,9 +486,9 @@ struct VectorStorage template void Free(TAllocator& alloc) noexcept { - if (!IsInline() && m_data) + if (!IsInline()) { - alloc.Free(m_data); + AllocTraits::Free(alloc, m_data, m_capacity); } } @@ -520,10 +520,7 @@ struct VectorStorage // auto data = m_data; ManipType().MoveCtorDtorSrcRange(m_inline, data, m_size); - if (data) - { - alloc.Free(data); - } + AllocTraits::Free(alloc, data, m_capacity); m_capacity = InlineCount; } else @@ -679,22 +676,22 @@ struct VectorOperations : public VectorStorage } template - void Move(TAllocator& alloc, ThisType& to) noexcept + void Move(TAllocator& to_alloc, ThisType& to) noexcept { if RAD_UNLIKELY (this == &to) { return; } + to.Clear(); + 
RAD_VERIFY(to.ShrinkToFit(to_alloc).IsOk()); Swap(to); - Clear(); - RAD_VERIFY(ShrinkToFit(alloc).IsOk()); } - template , int> = 0> - Err Copy(TAllocator& alloc, ThisType& to) + template + Err StrongCopy(TAllocator& new_alloc, + ThisType& to, + TAllocator& old_to_alloc) { if RAD_UNLIKELY (this == &to) { @@ -711,29 +708,56 @@ struct VectorOperations : public VectorStorage // implemented the basic thing, but should/will optimize further. Two // easy things we could are to use unused space in the vectors and/or // some stack space to avoid expensive memory allocations when possible. - VectorAlloc vec(alloc); + TAllocator target_alloc = + AllocTraits::PropagateOnCopy ? new_alloc : old_to_alloc; + VectorAlloc vec(target_alloc); if (!vec.Alloc(m_size)) { return Error::NoMemory; } ManipType().CopyCtorRange(vec, Data(), m_size); + + to.Clear(); + RAD_VERIFY(to.ShrinkToFit(old_to_alloc).IsOk()); to.Swap(vec); return NoError; } + template , int> = 0> + Err Copy(TAllocator& new_alloc, ThisType& to, TAllocator& old_to_alloc) + { + return StrongCopy(new_alloc, to, old_to_alloc); + } + template , int> = 0> - Err Copy(TAllocator& alloc, ThisType& to) noexcept + Err Copy(TAllocator& new_alloc, + ThisType& to, + TAllocator& old_to_alloc) noexcept { if RAD_UNLIKELY (this == &to) { return NoError; } - Err res = to.Reserve(alloc, m_size); + bool ShouldStrongCopy = + AllocTraits::PropagateOnCopy && + !AllocTraits::Equal(new_alloc, old_to_alloc); + if (ShouldStrongCopy) + { + // handle the propagation case, which involves freeing with the old + // allocator and allocating with the new one. + return StrongCopy(new_alloc, to, old_to_alloc); + } + + // Not propagating allocators (equality or PropagateOnCopy=false). + // So always use the old allocator. 
+ Err res = to.Reserve(old_to_alloc, m_size); if (!res.IsOk()) { return res.Err(); diff --git a/test/TestAlloc.cpp b/test/TestAlloc.cpp index d4a69a7..3d67103 100644 --- a/test/TestAlloc.cpp +++ b/test/TestAlloc.cpp @@ -19,98 +19,16 @@ namespace radtest { -uint32_t CountingAllocatorImpl::g_FreeCount = 0; -uint32_t CountingAllocatorImpl::g_AllocCount = 0; -uint32_t CountingAllocatorImpl::g_ReallocCount = 0; -uint32_t CountingAllocatorImpl::g_FreeBytesCount = 0; -uint32_t CountingAllocatorImpl::g_AllocBytesCount = 0; -uint32_t CountingAllocatorImpl::g_ReallocBytesCount = 0; - -void CountingAllocatorImpl::Free(void* ptr) noexcept -{ - ++g_FreeCount; - free(ptr); -} - -void* CountingAllocatorImpl::Alloc(uint32_t size) noexcept -{ - ++g_AllocCount; - return malloc(size); -} - -void* CountingAllocatorImpl::Realloc(void* ptr, uint32_t size) noexcept -{ - ++g_ReallocCount; - return realloc(ptr, size); -} - -void CountingAllocatorImpl::FreeBytes(void* ptr) noexcept -{ - ++g_FreeBytesCount; - free(ptr); -} - -void* CountingAllocatorImpl::AllocBytes(uint32_t size) noexcept -{ - ++g_AllocBytesCount; - return malloc(size); -} - -void* CountingAllocatorImpl::ReallocBytes(void* ptr, uint32_t size) noexcept -{ - ++g_ReallocBytesCount; - return realloc(ptr, size); -} - -uint32_t CountingAllocatorImpl::FreeCount() noexcept -{ - return g_FreeCount; -} - -uint32_t CountingAllocatorImpl::AllocCount() noexcept -{ - return g_AllocCount; -} - -uint32_t CountingAllocatorImpl::ReallocCount() noexcept -{ - return g_ReallocCount; -} - -uint32_t CountingAllocatorImpl::FreeBytesCount() noexcept -{ - return g_FreeBytesCount; -} - -uint32_t CountingAllocatorImpl::AllocBytesCount() noexcept -{ - return g_AllocBytesCount; -} - -uint32_t CountingAllocatorImpl::ReallocBytesCount() noexcept -{ - return g_ReallocBytesCount; -} - -void CountingAllocatorImpl::ResetCounts() noexcept -{ - g_FreeCount = 0; - g_AllocCount = 0; - g_ReallocCount = 0; - g_FreeBytesCount = 0; - g_AllocBytesCount = 0; - 
g_ReallocBytesCount = 0; -} - -bool CountingAllocatorImpl::VerifyCounts() noexcept -{ - return (g_AllocCount == g_FreeCount); -} - -bool CountingAllocatorImpl::VerifyCounts(uint32_t expectedAllocs, - uint32_t expectedFrees) noexcept -{ - return (g_AllocCount == expectedAllocs) && (g_FreeCount == expectedFrees); -} +const uint32_t StatefulAllocator::k_BadState; +const uint32_t StatefulCountingAllocator::k_BadState; + +uint32_t CountingAllocator::g_FreeCount = 0; +uint32_t CountingAllocator::g_AllocCount = 0; +size_t CountingAllocator::g_FreeBytesCount = 0; +size_t CountingAllocator::g_AllocBytesCount = 0; +uint32_t StatefulCountingAllocator::g_FreeCount = 0; +uint32_t StatefulCountingAllocator::g_AllocCount = 0; +size_t StatefulCountingAllocator::g_FreeBytesCount = 0; +size_t StatefulCountingAllocator::g_AllocBytesCount = 0; } // namespace radtest diff --git a/test/TestAlloc.h b/test/TestAlloc.h index f5a9cd6..798d03a 100644 --- a/test/TestAlloc.h +++ b/test/TestAlloc.h @@ -16,87 +16,167 @@ #include "gtest/gtest.h" +#include "radiant/TotallyRad.h" + +#include +#include +#include + namespace radtest { -static constexpr uint32_t k_BadState = 0xdeadc0de; static constexpr uint32_t k_MovedFromState = 0xc001d00d; -template -class Allocator +class Mallocator { public: - static constexpr bool NeedsFree = true; - static constexpr bool HasRealloc = true; - static constexpr bool HasAllocBytes = true; + static void* AllocBytes(size_t size) + { + return malloc(size); + } - using ThisType = Allocator; - using ValueType = T; - using SizeType = uint32_t; - using DifferenceType = ptrdiff_t; + static void FreeBytes(void* ptr, size_t size) noexcept + { + RAD_UNUSED(size); + free(ptr); + } - ~Allocator() = default; + static void HandleSizeOverflow() + { + } +}; - constexpr Allocator() noexcept = default; +inline void* TaggedAlloc(size_t size, uint32_t tag) +{ + static constexpr size_t kMaxSize = ~uint32_t(0); + if (size > kMaxSize) + { + return nullptr; + } + 
RAD_S_ASSERT(sizeof(max_align_t) >= 2 * sizeof(uint32_t)); - constexpr Allocator(const Allocator&) noexcept = default; + void* mem = malloc(size + sizeof(max_align_t)); + uint32_t* as_num = static_cast(mem); + as_num[0] = tag; + as_num[1] = static_cast(size); // range checked above + return static_cast(mem) + 1; +} - template - constexpr Allocator(const Allocator&) noexcept +inline void TaggedFree(void* ptr, size_t size, uint32_t tag) +{ + if (ptr == nullptr) { + return; } + void* alloc_begin = static_cast(ptr) - 1; + uint32_t* as_num = static_cast(alloc_begin); + EXPECT_EQ(as_num[0], tag); + EXPECT_EQ(as_num[1], size); + free(alloc_begin); +} - template - struct Rebind +class StickyTaggedAllocator +{ +public: + + // Propagates on false, because this is sticky + static constexpr bool PropagateOnCopy = false; + static constexpr bool PropagateOnMoveAssignment = false; + static constexpr bool PropagateOnSwap = false; + static constexpr bool IsAlwaysEqual = false; + + explicit StickyTaggedAllocator(uint32_t tag) + : m_tag(tag) { - using Other = Allocator; - }; + } - void Free(ValueType* ptr) noexcept + void* AllocBytes(size_t size) { - free(ptr); + return TaggedAlloc(size, m_tag); } - ValueType* Alloc(SizeType count) noexcept + void FreeBytes(void* ptr, size_t size) noexcept { - return (ValueType*)malloc(count * sizeof(T)); + TaggedFree(ptr, size, m_tag); } - ValueType* Realloc(ValueType* ptr, SizeType count) noexcept + static void HandleSizeOverflow() { - return (ValueType*)realloc(ptr, count * sizeof(T)); } - void FreeBytes(void* ptr) noexcept + bool operator==(const StickyTaggedAllocator& rhs) const noexcept { - free(ptr); + return m_tag == rhs.m_tag; } - void* AllocBytes(SizeType size) noexcept + uint32_t m_tag = 0; +}; + +class StickyDefaultTaggedAllocator : public StickyTaggedAllocator +{ +public: + + using StickyTaggedAllocator::StickyTaggedAllocator; + + explicit StickyDefaultTaggedAllocator() + : StickyTaggedAllocator(1234) + { + } +}; + +class 
MovingTaggedAllocator +{ +public: + + // Propagates on true, because this moves around + static constexpr bool PropagateOnCopy = true; + static constexpr bool PropagateOnMoveAssignment = true; + static constexpr bool PropagateOnSwap = true; + static constexpr bool IsAlwaysEqual = false; + + explicit MovingTaggedAllocator(uint32_t tag) + : m_tag(tag) { - return malloc(size); } - void* ReallocBytes(void* ptr, SizeType size) noexcept + void* AllocBytes(size_t size) { - return realloc(ptr, size); + return TaggedAlloc(size, m_tag); } + + void FreeBytes(void* ptr, size_t size) noexcept + { + TaggedFree(ptr, size, m_tag); + } + + static void HandleSizeOverflow() + { + } + + bool operator==(const MovingTaggedAllocator& rhs) const noexcept + { + return m_tag == rhs.m_tag; + } + + uint32_t m_tag = 0; }; -template class StatefulAllocator { public: - static constexpr bool NeedsFree = true; - static constexpr bool HasRealloc = true; - static constexpr bool HasAllocBytes = true; + static constexpr bool IsAlwaysEqual = false; + static constexpr bool PropagateOnMoveAssignment = true; + static constexpr bool PropagateOnCopy = true; + static constexpr bool PropagateOnSwap = true; - using ThisType = StatefulAllocator; - using ValueType = T; - using SizeType = uint32_t; - using DifferenceType = ptrdiff_t; + bool operator==(const StatefulAllocator& other) const + { + return m_state == other.m_state; + } + + static constexpr uint32_t k_BadState = 0xdeadc0de; ~StatefulAllocator() { @@ -111,12 +191,6 @@ class StatefulAllocator StatefulAllocator(const StatefulAllocator&) noexcept = default; StatefulAllocator& operator=(const StatefulAllocator&) noexcept = default; - template - StatefulAllocator(const StatefulAllocator& other) noexcept - : m_state(other.m_state) - { - } - StatefulAllocator(StatefulAllocator&& other) noexcept : m_state(other.m_state) { @@ -136,164 +210,64 @@ class StatefulAllocator return *this; } - template - struct Rebind - { - using Other = StatefulAllocator; - }; - - 
void Free(ValueType* ptr) noexcept + void FreeBytes(void* ptr, size_t byte_count) noexcept { EXPECT_NE(m_state, k_BadState) << "Allocator used after destruction"; + RAD_UNUSED(byte_count); free(ptr); } - ValueType* Alloc(SizeType count) noexcept - { - EXPECT_NE(m_state, k_BadState) << "Allocator used after destruction"; - - return (ValueType*)malloc(count * sizeof(T)); - } - - ValueType* Realloc(ValueType* ptr, SizeType count) noexcept + void* AllocBytes(size_t byte_count) noexcept { EXPECT_NE(m_state, k_BadState) << "Allocator used after destruction"; - return (ValueType*)realloc(ptr, count * sizeof(T)); - } - - void FreeBytes(void* ptr) noexcept - { - EXPECT_NE(m_state, k_BadState) << "Allocator used after destruction"; - - free(ptr); - } - - void* AllocBytes(SizeType size) noexcept - { - EXPECT_NE(m_state, k_BadState) << "Allocator used after destruction"; - - return malloc(size); + return malloc(byte_count); } uint32_t m_state; }; -template class FailingAllocator { public: - static constexpr bool NeedsFree = true; - static constexpr bool HasRealloc = true; - static constexpr bool HasAllocBytes = true; - - using ThisType = FailingAllocator; - using ValueType = T; - using SizeType = uint32_t; - using DifferenceType = ptrdiff_t; - - ~FailingAllocator() = default; - - constexpr FailingAllocator() noexcept = default; - - constexpr FailingAllocator(const FailingAllocator&) noexcept = default; - - template - constexpr FailingAllocator(const FailingAllocator&) noexcept + void FreeBytes(void* mem, size_t byte_count) noexcept { + RAD_UNUSED(mem); + RAD_UNUSED(byte_count); } - template - struct Rebind - { - using Other = FailingAllocator; - }; - - void Free(ValueType*) noexcept - { - } - - ValueType* Alloc(SizeType) noexcept - { - return nullptr; - } - - ValueType* Realloc(ValueType*, SizeType) noexcept - { - return nullptr; - } - - void FreeBytes(void*) noexcept - { - } - - void* AllocBytes(SizeType) noexcept + void* AllocBytes(size_t byte_count) noexcept { + 
RAD_UNUSED(byte_count); return nullptr; } - void* ReallocBytes(void*, SizeType) noexcept + static void HandleSizeOverflow() { - return nullptr; } }; -template class OOMAllocator { public: - static constexpr bool NeedsFree = true; - static constexpr bool HasRealloc = true; - static constexpr bool HasAllocBytes = true; - - using ThisType = StatefulAllocator; - using ValueType = T; - using SizeType = uint32_t; - using DifferenceType = ptrdiff_t; - - ~OOMAllocator() - { - } + static constexpr bool IsAlwaysEqual = true; explicit OOMAllocator(int oom) noexcept : m_oom(oom) { } - OOMAllocator(const OOMAllocator&) noexcept = default; - - template - OOMAllocator(const OOMAllocator& other) noexcept - : m_oom(other.m_oom) - { - } - - template - struct Rebind + void FreeBytes(void* ptr, size_t byte_count) noexcept { - using Other = OOMAllocator; - }; + RAD_UNUSED(byte_count); - void Free(ValueType* ptr) noexcept - { free(ptr); } - ValueType* Alloc(SizeType count) noexcept - { - m_oom--; - if (m_oom < 0) - { - return nullptr; - } - - return (ValueType*)malloc(count * sizeof(T)); - } - - ValueType* Realloc(ValueType* ptr, SizeType count) noexcept + void* AllocBytes(size_t byte_count) noexcept { m_oom--; if (m_oom < 0) @@ -301,184 +275,120 @@ class OOMAllocator return nullptr; } - return (ValueType*)realloc(ptr, count * sizeof(T)); - } - - void FreeBytes(void* ptr) noexcept - { - free(ptr); + return malloc(byte_count); } - void* AllocBytes(SizeType size) noexcept + static void HandleSizeOverflow() { - m_oom--; - if (m_oom < 0) - { - return nullptr; - } - - return malloc(size); } int m_oom; }; -class CountingAllocatorImpl +class MoveOOMAllocator : public OOMAllocator { public: - static void Free(void* ptr) noexcept; - static void* Alloc(uint32_t size) noexcept; - static void* Realloc(void* ptr, uint32_t size) noexcept; - static void FreeBytes(void* ptr) noexcept; - static void* AllocBytes(uint32_t size) noexcept; - static void* ReallocBytes(void* ptr, uint32_t size) noexcept; - 
static uint32_t FreeCount() noexcept; - static uint32_t AllocCount() noexcept; - static uint32_t ReallocCount() noexcept; - static uint32_t FreeBytesCount() noexcept; - static uint32_t AllocBytesCount() noexcept; - static uint32_t ReallocBytesCount() noexcept; - static void ResetCounts() noexcept; - static bool VerifyCounts() noexcept; - static bool VerifyCounts(uint32_t expectedAllocs, - uint32_t expectedFrees) noexcept; - -private: - - static uint32_t g_FreeCount; - static uint32_t g_AllocCount; - static uint32_t g_ReallocCount; - static uint32_t g_FreeBytesCount; - static uint32_t g_AllocBytesCount; - static uint32_t g_ReallocBytesCount; -}; + static constexpr bool IsAlwaysEqual = false; + static constexpr bool PropagateOnCopy = true; + static constexpr bool PropagateOnMoveAssignment = true; + static constexpr bool PropagateOnSwap = true; -template -class CountingAllocator -{ -public: - - static constexpr bool NeedsFree = true; - static constexpr bool HasRealloc = true; - static constexpr bool HasAllocBytes = true; - - using ThisType = CountingAllocator; - using ValueType = T; - using SizeType = uint32_t; - using DifferenceType = ptrdiff_t; - - using Impl = CountingAllocatorImpl; - - ~CountingAllocator() = default; - - constexpr CountingAllocator() noexcept = default; - - constexpr CountingAllocator(const CountingAllocator&) noexcept = default; - - template - constexpr CountingAllocator(const CountingAllocator&) noexcept + explicit MoveOOMAllocator(int oom, int id) noexcept + : OOMAllocator(oom), + m_id(id) { } - template - struct Rebind + bool operator==(const MoveOOMAllocator& other) const noexcept { - using Other = CountingAllocator; - }; - - void Free(ValueType* ptr) noexcept - { - Impl::Free(ptr); - } - - ValueType* Alloc(SizeType count) noexcept - { - return (ValueType*)Impl::Alloc((sizeof(T) * count)); + return m_id == other.m_id; } - ValueType* Realloc(ValueType* ptr, SizeType count) noexcept - { - return (ValueType*)Impl::Realloc(ptr, (sizeof(T) * 
count)); - } - - void FreeBytes(void* ptr) noexcept - { - Impl::Free(ptr); - } + int m_id; +}; - void* AllocBytes(SizeType size) noexcept - { - return (ValueType*)Impl::Alloc(size); - } +class CountingAllocator +{ +public: - void* ReallocBytes(void* ptr, SizeType size) noexcept - { - return Impl::Realloc(ptr, size); - } + static uint32_t g_FreeCount; + static uint32_t g_AllocCount; + static size_t g_FreeBytesCount; + static size_t g_AllocBytesCount; - uint32_t FreeCount() const noexcept + void FreeBytes(void* ptr, size_t byte_count) noexcept { - return Impl::FreeCount(); - } + RAD_UNUSED(byte_count); - uint32_t AllocCount() const noexcept - { - return Impl::AllocCount(); + if (ptr != nullptr) + { + ++g_FreeCount; + g_FreeBytesCount += byte_count; + } + free(ptr); } - uint32_t ReallocCount() const noexcept + void* AllocBytes(size_t byte_count) noexcept { - return Impl::ReallocCount(); + ++g_AllocCount; + g_AllocBytesCount += byte_count; + return malloc(byte_count); } - uint32_t FreeBytesCount() const noexcept + static void HandleSizeOverflow() { - return Impl::FreeBytesCount(); } - uint32_t AllocBytesCount() const noexcept + uint32_t FreeCount() const noexcept { - return Impl::AllocBytesCount(); + return g_FreeCount; } - uint32_t ReallocBytesCount() const noexcept + uint32_t AllocCount() const noexcept { - return Impl::ReallocBytesCount(); + return g_AllocCount; } void ResetCounts() noexcept { - Impl::ResetCounts(); + g_AllocBytesCount = g_FreeBytesCount = g_FreeCount = g_AllocCount = 0; } - bool VerifyCounts() const noexcept + void VerifyCounts() const noexcept { - return Impl::VerifyCounts(); + EXPECT_EQ(g_AllocCount, g_FreeCount); + EXPECT_EQ(g_AllocBytesCount, g_FreeBytesCount); } - bool VerifyCounts(uint32_t expectedAllocs, + void VerifyCounts(uint32_t expectedAllocs, uint32_t expectedFrees) const noexcept { - return Impl::VerifyCounts(expectedAllocs, expectedFrees); + EXPECT_EQ(g_AllocCount, expectedAllocs); + EXPECT_EQ(g_FreeCount, expectedFrees); } }; 
-template class StatefulCountingAllocator { public: - static constexpr bool NeedsFree = true; - static constexpr bool HasRealloc = true; - static constexpr bool HasAllocBytes = true; + static constexpr bool IsAlwaysEqual = false; + static constexpr bool PropagateOnMoveAssignment = true; + static constexpr bool PropagateOnCopy = true; + static constexpr bool PropagateOnSwap = true; - using ThisType = StatefulCountingAllocator; - using ValueType = T; - using SizeType = uint32_t; - using DifferenceType = ptrdiff_t; + bool operator==(const StatefulCountingAllocator& other) const + { + return m_state == other.m_state; + } + + static uint32_t g_FreeCount; + static uint32_t g_AllocCount; + static size_t g_AllocBytesCount; + static size_t g_FreeBytesCount; - using Impl = CountingAllocatorImpl; + static constexpr uint32_t k_BadState = 0xdeadc0de; ~StatefulCountingAllocator() { @@ -493,119 +403,77 @@ class StatefulCountingAllocator StatefulCountingAllocator(const StatefulCountingAllocator&) noexcept = default; - template - StatefulCountingAllocator( - const StatefulCountingAllocator& other) noexcept - : m_state(other.m_state) - { - } - - template - struct Rebind - { - using Other = StatefulCountingAllocator; - }; - - void Free(ValueType* ptr) noexcept - { - EXPECT_NE(m_state, k_BadState) << "Allocator used after destruction"; - - Impl::Free(ptr); - } - - ValueType* Alloc(SizeType count) noexcept - { - EXPECT_NE(m_state, k_BadState) << "Allocator used after destruction"; - - return (ValueType*)Impl::Alloc((sizeof(T) * count)); - } - - ValueType* Realloc(ValueType* ptr, SizeType count) noexcept + void FreeBytes(void* ptr, size_t byte_count) noexcept { - EXPECT_NE(m_state, k_BadState) << "Allocator used after destruction"; - - return (ValueType*)Impl::Realloc(ptr, (sizeof(T) * count)); - } + RAD_UNUSED(byte_count); - void FreeBytes(void* ptr) noexcept - { EXPECT_NE(m_state, k_BadState) << "Allocator used after destruction"; - - Impl::Free(ptr); + if (ptr != nullptr) + { + 
++g_FreeCount; + g_FreeBytesCount += byte_count; + } + free(ptr); } - void* AllocBytes(SizeType size) noexcept + void* AllocBytes(size_t byte_count) noexcept { EXPECT_NE(m_state, k_BadState) << "Allocator used after destruction"; - return (ValueType*)Impl::Alloc(size); + ++g_AllocCount; + g_AllocBytesCount += byte_count; + return malloc(byte_count); } - void* ReallocBytes(void* ptr, SizeType size) noexcept + static void HandleSizeOverflow() { - EXPECT_NE(m_state, k_BadState) << "Allocator used after destruction"; - - return Impl::Realloc(ptr, size); } uint32_t FreeCount() const noexcept { - return Impl::FreeCount(); + return g_FreeCount; } uint32_t AllocCount() const noexcept { - return Impl::AllocCount(); - } - - uint32_t ReallocCount() const noexcept - { - return Impl::ReallocCount(); - } - - uint32_t FreeBytesCount() const noexcept - { - return Impl::FreeBytesCount(); - } - - uint32_t AllocBytesCount() const noexcept - { - return Impl::AllocBytesCount(); - } - - uint32_t ReallocBytesCount() const noexcept - { - return Impl::ReallocBytesCount(); + return g_AllocCount; } void ResetCounts() noexcept { - Impl::ResetCounts(); + g_AllocBytesCount = g_FreeBytesCount = g_FreeCount = g_AllocCount = 0; } - bool VerifyCounts() const noexcept + void VerifyCounts() const noexcept { - return Impl::VerifyCounts(); + EXPECT_EQ(g_AllocCount, g_FreeCount); + EXPECT_EQ(g_AllocBytesCount, g_FreeBytesCount); } - bool VerifyCounts(uint32_t expectedAllocs, + void VerifyCounts(uint32_t expectedAllocs, uint32_t expectedFrees) const noexcept { - return Impl::VerifyCounts(expectedAllocs, expectedFrees); + EXPECT_EQ(g_AllocCount, expectedAllocs); + EXPECT_EQ(g_FreeCount, expectedFrees); } uint32_t m_state; }; -struct HeapAllocator +struct HeapResource { - void Free(void* ptr) + void FreeBytes(void* ptr, size_t byte_count) { - freeCount++; + RAD_UNUSED(byte_count); + + if (ptr != nullptr) + { + freeCount++; + } free(ptr); } - void* Alloc(uint32_t count) + void* AllocBytes(size_t byte_count) 
{ if (forceAllocFails > 0) { @@ -622,49 +490,7 @@ struct HeapAllocator } allocCount++; - return malloc(count); - } - - void* Realloc(void* ptr, uint32_t count) - { - if (forceReallocFails > 0) - { - forceReallocFails--; - return nullptr; - } - - reallocCount++; - return realloc(ptr, count); - } - - void FreeBytes(void* ptr) - { - freeBytesCount++; - free(ptr); - } - - void* AllocBytes(uint32_t count) - { - if (forceAllocBytesFails > 0) - { - forceAllocBytesFails--; - return nullptr; - } - - allocBytesCount++; - return malloc(count); - } - - void* ReallocBytes(void* ptr, uint32_t count) - { - if (forceReallocBytesFails > 0) - { - forceReallocBytesFails--; - return nullptr; - } - - reallocBytesCount++; - return realloc(ptr, count); + return malloc(byte_count); } int32_t freeCount{ 0 }; @@ -672,85 +498,70 @@ struct HeapAllocator int32_t forceFutureAllocFail{ 0 }; int32_t forceAllocFails{ 0 }; int32_t allocCount{ 0 }; - - uint32_t forceReallocFails{ 0 }; - uint32_t reallocCount{ 0 }; - - uint32_t freeBytesCount{ 0 }; - - uint32_t forceAllocBytesFails{ 0 }; - uint32_t allocBytesCount{ 0 }; - - uint32_t forceReallocBytesFails{ 0 }; - uint32_t reallocBytesCount{ 0 }; }; -template -class AllocWrapper +template +class ResourceAllocator { public: - static constexpr bool NeedsFree = true; - static constexpr bool HasRealloc = true; - static constexpr bool HasAllocBytes = true; - - using ThisType = AllocWrapper; - using ValueType = T; - using SizeType = uint32_t; - using DifferenceType = ptrdiff_t; - - ~AllocWrapper() = default; + static constexpr bool IsAlwaysEqual = false; + static constexpr bool PropagateOnMoveAssignment = true; + static constexpr bool PropagateOnCopy = true; + static constexpr bool PropagateOnSwap = true; - constexpr AllocWrapper(TBase& alloc) noexcept - : base(&alloc) + bool operator==(const ResourceAllocator& other) const noexcept { + return m_res == other.m_res; } - constexpr AllocWrapper(const AllocWrapper&) noexcept = default; - - template - constexpr 
AllocWrapper(const AllocWrapper& other) noexcept - : base(other.base) + constexpr ResourceAllocator(Res& res) noexcept + : m_res(&res) { } - template - struct Rebind - { - using Other = AllocWrapper; - }; - - void Free(ValueType* ptr) noexcept + void FreeBytes(void* ptr, size_t byte_count) noexcept { - base->Free(ptr); + m_res->FreeBytes(ptr, byte_count); } - ValueType* Alloc(SizeType count) noexcept + void* AllocBytes(size_t byte_count) noexcept { - return (ValueType*)base->Alloc(count * sizeof(T)); + return m_res->AllocBytes(byte_count); } - ValueType* Realloc(ValueType* ptr, SizeType count) noexcept + static void HandleSizeOverflow() { - return (ValueType*)base->Realloc(ptr, count * sizeof(T)); } - void FreeBytes(void* ptr) noexcept - { - base->FreeBytes(ptr); - } + Res* m_res; +}; + +class TypedAllocator +{ +public: + + static constexpr bool IsAlwaysEqual = true; + static constexpr bool HasTypedAllocations = true; - void* AllocBytes(SizeType size) noexcept + template + static constexpr T* Alloc(size_t count) { - return base->AllocBytes(size); + if (count > UINT32_MAX / sizeof(T)) + { + return nullptr; + } + + void* raw = malloc(count * sizeof(T)); + return static_cast(raw); } - void* ReallocBytes(void* ptr, SizeType size) noexcept + template + static constexpr void Free(T* ptr, size_t count) noexcept { - return base->ReallocBytes(ptr, size); + RAD_UNUSED(count); + free(ptr); } - - TBase* base; }; } // namespace radtest diff --git a/test/test_Allocators.cpp b/test/test_Allocators.cpp new file mode 100644 index 0000000..14a4b4b --- /dev/null +++ b/test/test_Allocators.cpp @@ -0,0 +1,372 @@ +// Copyright 2024 The Radiant Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "gtest/gtest.h" + +#include "radiant/Memory.h" + +#include "TestAlloc.h" + +struct TrackCtorDtor +{ + static int CtorCount; + static int DtorCount; + static int OtherCtorCount; + + TrackCtorDtor() + { + ++CtorCount; + } + + explicit TrackCtorDtor(uint64_t val) + : m_dummy_data(val) + { + ++OtherCtorCount; + } + + ~TrackCtorDtor() + { + ++DtorCount; + } + + static void Reset() + { + CtorCount = 0; + DtorCount = 0; + OtherCtorCount = 0; + } + + uint64_t m_dummy_data; +}; + +int TrackCtorDtor::CtorCount = 0; +int TrackCtorDtor::DtorCount = 0; +int TrackCtorDtor::OtherCtorCount = 0; + +TEST(AllocatorTests, MinimalAlloc) +{ + radtest::Mallocator mal; + using allt = rad::AllocTraits; + RAD_S_ASSERT(allt::PropagateOnCopy == false); + RAD_S_ASSERT(allt::PropagateOnMoveAssignment == false); + RAD_S_ASSERT(allt::PropagateOnSwap == false); + RAD_S_ASSERT(allt::IsAlwaysEqual == true); + RAD_S_ASSERT(allt::HasConstructAndDestroy == false); + RAD_S_ASSERT(allt::HasTypedAllocations == false); + + { + void* vmem = allt::AllocBytes(mal, 1); + EXPECT_NE(vmem, nullptr); + allt::FreeBytes(mal, vmem, 1); + + allt::FreeBytes(mal, nullptr, 1); + } + + { + static constexpr size_t kBiggest = ~size_t(0); + static constexpr size_t kEltsToAttempt = kBiggest / 2; + + uint64_t* too_much = allt::Alloc(mal, kEltsToAttempt); + EXPECT_EQ(too_much, nullptr); + allt::Free(mal, too_much, kEltsToAttempt); + + uint64_t* unull = nullptr; + allt::Free(mal, unull, kEltsToAttempt); + } + + { + TrackCtorDtor::Reset(); + TrackCtorDtor* tmem = allt::Alloc(mal, 4); + EXPECT_NE(tmem, 
nullptr); + EXPECT_EQ(TrackCtorDtor::CtorCount, 0); + EXPECT_EQ(TrackCtorDtor::DtorCount, 0); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 0); + + // check alignment + uintptr_t mask = alignof(TrackCtorDtor) - 1; + uintptr_t tmem_num = reinterpret_cast(tmem); + EXPECT_EQ((tmem_num & mask), 0u); + + TrackCtorDtor* constructed = allt::Construct(mal, tmem); + EXPECT_NE(constructed, nullptr); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 0); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 0); + + allt::Destroy(mal, constructed); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 1); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 0); + + TrackCtorDtor* other_constructed = allt::Construct(mal, tmem, 42u); + EXPECT_NE(other_constructed, nullptr); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 1); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 1); + + allt::Destroy(mal, other_constructed); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 2); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 1); + + allt::Free(mal, tmem, 4); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 2); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 1); + } + + { + radtest::Mallocator mal2; + EXPECT_TRUE(allt::Equal(mal, mal2)); + EXPECT_TRUE(allt::Equal(mal2, mal)); + EXPECT_TRUE(allt::Equal(mal, radtest::Mallocator{})); + } +} + +TEST(AllocatorTests, PropagatingAllocator) +{ + constexpr uint32_t kMainTag = 42; + radtest::MovingTaggedAllocator tal(kMainTag); + using allt = rad::AllocTraits; + RAD_S_ASSERT(allt::PropagateOnCopy == true); + RAD_S_ASSERT(allt::PropagateOnMoveAssignment == true); + RAD_S_ASSERT(allt::PropagateOnSwap == true); + RAD_S_ASSERT(allt::IsAlwaysEqual == false); + RAD_S_ASSERT(allt::HasConstructAndDestroy == false); + RAD_S_ASSERT(allt::HasTypedAllocations == false); + + { + void* vmem = allt::AllocBytes(tal, 1); + EXPECT_NE(vmem, nullptr); + 
allt::FreeBytes(tal, vmem, 1); + + allt::FreeBytes(tal, nullptr, 1); + } + + { + static constexpr size_t kBiggest = ~size_t(0); + static constexpr size_t kEltsToAttempt = kBiggest / 2; + + uint64_t* too_much = allt::Alloc(tal, kEltsToAttempt); + EXPECT_EQ(too_much, nullptr); + allt::Free(tal, too_much, kEltsToAttempt); + + uint64_t* unull = nullptr; + allt::Free(tal, unull, kEltsToAttempt); + } + + { + TrackCtorDtor::Reset(); + TrackCtorDtor* tmem = allt::Alloc(tal, 4); + EXPECT_NE(tmem, nullptr); + EXPECT_EQ(TrackCtorDtor::CtorCount, 0); + EXPECT_EQ(TrackCtorDtor::DtorCount, 0); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 0); + + // check alignment + uintptr_t mask = alignof(TrackCtorDtor) - 1; + uintptr_t tmem_num = reinterpret_cast(tmem); + EXPECT_EQ((tmem_num & mask), 0u); + + TrackCtorDtor* constructed = allt::Construct(tal, tmem); + EXPECT_NE(constructed, nullptr); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 0); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 0); + + allt::Destroy(tal, constructed); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 1); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 0); + + TrackCtorDtor* other_constructed = allt::Construct(tal, tmem, 42u); + EXPECT_NE(other_constructed, nullptr); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 1); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 1); + + allt::Destroy(tal, other_constructed); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 2); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 1); + + allt::Free(tal, tmem, 4); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 2); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 1); + } + + { + radtest::MovingTaggedAllocator tal2 = tal; + EXPECT_TRUE(allt::Equal(tal, tal2)); + EXPECT_TRUE(allt::Equal(tal2, tal)); + + radtest::MovingTaggedAllocator tal3(kMainTag); + EXPECT_TRUE(allt::Equal(tal, tal3)); + 
EXPECT_TRUE(allt::Equal(tal3, tal)); + + radtest::MovingTaggedAllocator tal4(kMainTag + 1); + EXPECT_FALSE(allt::Equal(tal, tal4)); + EXPECT_FALSE(allt::Equal(tal4, tal)); + } +} + +#if RAD_ENABLE_STD +TEST(AllocatorTests, StdAllocator) +{ + rad::StdAllocator sal; + using allt = rad::AllocTraits; + RAD_S_ASSERT(allt::PropagateOnCopy == false); + RAD_S_ASSERT(allt::PropagateOnMoveAssignment == false); + RAD_S_ASSERT(allt::PropagateOnSwap == false); + RAD_S_ASSERT(allt::IsAlwaysEqual == true); + RAD_S_ASSERT(allt::HasConstructAndDestroy == true); + RAD_S_ASSERT(allt::HasTypedAllocations == true); + + { + void* vmem = allt::AllocBytes(sal, 1); + EXPECT_NE(vmem, nullptr); + allt::FreeBytes(sal, vmem, 1); + } + + { + TrackCtorDtor::Reset(); + TrackCtorDtor* tmem = allt::Alloc(sal, 4); + EXPECT_NE(tmem, nullptr); + EXPECT_EQ(TrackCtorDtor::CtorCount, 0); + EXPECT_EQ(TrackCtorDtor::DtorCount, 0); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 0); + + // check alignment + uintptr_t mask = alignof(TrackCtorDtor) - 1; + uintptr_t tmem_num = reinterpret_cast(tmem); + EXPECT_EQ((tmem_num & mask), 0u); + + TrackCtorDtor* constructed = allt::Construct(sal, tmem); + EXPECT_NE(constructed, nullptr); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 0); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 0); + + allt::Destroy(sal, constructed); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 1); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 0); + + TrackCtorDtor* other_constructed = allt::Construct(sal, tmem, 42); + EXPECT_NE(other_constructed, nullptr); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 1); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 1); + + allt::Destroy(sal, other_constructed); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + EXPECT_EQ(TrackCtorDtor::DtorCount, 2); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 1); + + allt::Free(sal, tmem, 4); + EXPECT_EQ(TrackCtorDtor::CtorCount, 1); + 
EXPECT_EQ(TrackCtorDtor::DtorCount, 2); + EXPECT_EQ(TrackCtorDtor::OtherCtorCount, 1); + } + + { + rad::StdAllocator sal2; + EXPECT_TRUE(allt::Equal(sal, sal2)); + EXPECT_TRUE(allt::Equal(sal2, sal)); + EXPECT_TRUE(allt::Equal(sal, rad::StdAllocator{})); + + EXPECT_TRUE(sal == sal2); + EXPECT_FALSE(sal != sal2); + } +} + +#if RAD_CPP20 +constexpr bool test_constexpr_StdAllocator() +{ + rad::StdAllocator sal; + using allt = rad::AllocTraits; + + { + uint64_t* tmem = allt::Alloc(sal, 4); + assert(tmem != nullptr); + + uint64_t* constructed = allt::Construct(sal, tmem); + assert(constructed != nullptr); + + *constructed = 19; + + allt::Destroy(sal, constructed); + + uint64_t* other_constructed = allt::Construct(sal, tmem, 42); + assert(other_constructed != nullptr); + assert(*other_constructed == 42); + + allt::Destroy(sal, other_constructed); + allt::Free(sal, tmem, 4); + } + + { + rad::StdAllocator sal2; + RAD_UNUSED(sal2); + assert(allt::Equal(sal, sal2)); + assert(allt::Equal(sal2, sal)); + assert(allt::Equal(sal, rad::StdAllocator{})); + } + return true; +} + +static_assert(test_constexpr_StdAllocator()); + +#endif // RAD_CPP20 ^^^ + +#endif // RAD_ENABLE_STD ^^^ + +struct RadTestExceptionType +{ +}; + +struct ThrowingMallocator : radtest::Mallocator +{ + static void HandleSizeOverflow() + { + throw RadTestExceptionType(); + } +}; + +struct BigType +{ + char buf[1 << 30]; // 1 GB +}; + +TEST(AllocatorTests, ThrowingAllocator) +{ + ThrowingMallocator tm; +#if RAD_AMD64 || RAD_ARM64 + size_t elt_count = 1ull << 40; +#elif RAD_I386 || RAD_ARM + size_t elt_count = 4; +#else +#error "Unknown platform" +#endif + + BigType* b = nullptr; + + using AllocatorTraits = rad::AllocTraits; + + EXPECT_THROW(b = AllocatorTraits::template Alloc(tm, elt_count); + , RadTestExceptionType); + EXPECT_EQ(b, nullptr); + AllocatorTraits::template Free(tm, b, elt_count); +} diff --git a/test/test_EmptyOptimizedPair.cpp b/test/test_EmptyOptimizedPair.cpp index 5cd6c19..3c79586 100644 --- 
a/test/test_EmptyOptimizedPair.cpp +++ b/test/test_EmptyOptimizedPair.cpp @@ -90,12 +90,12 @@ struct ThrowingStateful RAD_S_ASSERT(noexcept(rad::EmptyOptimizedPair())); RAD_S_ASSERT(!noexcept(rad::EmptyOptimizedPair())); RAD_S_ASSERT(!noexcept(rad::EmptyOptimizedPair())); -RAD_S_ASSERT(noexcept(rad::EmptyOptimizedPair, bool>( - rad::DeclVal&>(), true))); -RAD_S_ASSERT(noexcept(rad::EmptyOptimizedPair, bool>( - rad::DeclVal&>(), true))); -RAD_S_ASSERT(noexcept(rad::EmptyOptimizedPair, bool>( - rad::DeclVal&&>(), true))); +RAD_S_ASSERT(noexcept(rad::EmptyOptimizedPair( + rad::DeclVal(), true))); +RAD_S_ASSERT(noexcept(rad::EmptyOptimizedPair( + rad::DeclVal(), true))); +RAD_S_ASSERT(noexcept(rad::EmptyOptimizedPair( + rad::DeclVal(), true))); // empty copy ctors RAD_S_ASSERT(noexcept(rad::EmptyOptimizedPair( diff --git a/test/test_List.cpp b/test/test_List.cpp index 38f5b71..6820f81 100644 --- a/test/test_List.cpp +++ b/test/test_List.cpp @@ -11,7 +11,7 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
-#define RAD_DEFAULT_ALLOCATOR radtest::Allocator +#define RAD_DEFAULT_ALLOCATOR radtest::Mallocator #include "test/TestAlloc.h" #include "test/TestInputStringLiteralRange.h" @@ -80,7 +80,7 @@ TEST(ListTest, DefaultConstructIsEmpty) TEST(ListTest, AllocatorConstructors) { - using StatefulAlloc = radtest::StatefulAllocator; + using StatefulAlloc = radtest::StatefulAllocator; { rad::List default_ctor; StatefulAlloc default_ctor_alloc = default_ctor.GetAllocator(); @@ -94,22 +94,21 @@ TEST(ListTest, AllocatorConstructors) StatefulAlloc alloc = alloc_ctor.GetAllocator(); EXPECT_EQ(alloc.m_state, 42u); - // regular move constructor needs to steal the original allocator + // regular move constructor needs to copy the original allocator rad::List moving_alloc_ctor(std::move(alloc_ctor)); StatefulAlloc moved_from_alloc = alloc_ctor.GetAllocator(); - EXPECT_EQ(moved_from_alloc.m_state, radtest::k_MovedFromState); + EXPECT_EQ(moved_from_alloc.m_state, 42u); StatefulAlloc moved_to_alloc = moving_alloc_ctor.GetAllocator(); EXPECT_EQ(moved_to_alloc.m_state, 42u); StatefulAlloc source_alloc2; source_alloc2.m_state = 99; rad::List move_assigned(source_alloc2); + // moving_alloc_ctor's "42" allocator will propagate to move_assigned move_assigned = std::move(moving_alloc_ctor); StatefulAlloc assigned_from_alloc = moving_alloc_ctor.GetAllocator(); - // assigned_from_alloc should have whatever move_assigned had in it - // before - EXPECT_EQ(assigned_from_alloc.m_state, 99u); + EXPECT_EQ(assigned_from_alloc.m_state, 42U); StatefulAlloc assigned_to_alloc = move_assigned.GetAllocator(); EXPECT_EQ(assigned_to_alloc.m_state, 42u); } @@ -117,11 +116,11 @@ TEST(ListTest, AllocatorConstructors) TEST(ListTest, PushBackFailureRecovery) { - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); { - rad::List> list( + rad::List> list( alloc); EXPECT_TRUE(list.PushBack(1).IsOk()); @@ -408,6 +407,76 @@ 
TEST(ListTest, MoveConstruct) } } +TEST(ListTest, MoveAssign) +{ + { + rad::List empty; + rad::List move_from_empty; + move_from_empty = std::move(empty); + ListEqual(empty, {}); + ListEqual(move_from_empty, {}); + } + { + rad::List one; + EXPECT_TRUE(one.PushBack(1).IsOk()); + rad::List move_from_one; + + move_from_one = std::move(one); + + ListEqual(one, {}); + ListEqual(move_from_one, { 1 }); + } + { + rad::List two; + EXPECT_TRUE(two.AssignRange(std::initializer_list{ 1, 2 }).IsOk()); + + rad::List move_from_two; + + move_from_two = std::move(two); + ListEqual(two, {}); + ListEqual(move_from_two, { 1, 2 }); + } + { + rad::List one; + EXPECT_TRUE(one.PushBack(1).IsOk()); + + rad::List move_from_one; + move_from_one = std::move(one); + + // ensure we can still mutate after moves + EXPECT_TRUE(one.PushBack(101).IsOk()); + EXPECT_TRUE(move_from_one.PushBack(201).IsOk()); + ListEqual(one, { 101 }); + ListEqual(move_from_one, { 1, 201 }); + } +#ifdef RAD_GCC_VERSION +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wself-move" +#endif +#ifdef RAD_CLANG_VERSION +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wself-move" +#endif + { + rad::List self_move; + self_move = std::move(self_move); + ListEqual(self_move, {}); + } + { + rad::List self_move; + EXPECT_TRUE(self_move.PushBack(101).IsOk()); + self_move = std::move(self_move); + EXPECT_TRUE(self_move.PushBack(102).IsOk()); + ListEqual(self_move, { 101, 102 }); + } +#ifdef RAD_CLANG_VERSION +#pragma clang diagnostic pop +#endif +#ifdef RAD_GCC_VERSION +#pragma GCC diagnostic pop +#endif +} + TEST(ListTest, ClearSome) { rad::List i; @@ -519,10 +588,10 @@ TEST(ListTest, AssignCount) TEST(ListTest, Emplace) { - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); rad::List> + radtest::ResourceAllocator> input(alloc); // emplace at the end @@ -571,10 +640,9 @@ TEST(ListTest, Emplace) TEST(ListTest, 
MoveInsert) { - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); - rad::List> + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); + rad::List> input(alloc); MoveStruct ms; @@ -641,10 +709,9 @@ TEST(ListTest, MoveInsert) TEST(ListTest, CopyInsert) { - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); - rad::List> + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); + rad::List> input(alloc); CopyStruct cs; @@ -710,9 +777,9 @@ TEST(ListTest, CopyInsert) TEST(ListTest, AssignFailure) { - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); - rad::List> list( + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); + rad::List> list( alloc); // AssignCount fails back to empty when it starts empty @@ -816,6 +883,29 @@ TEST(ListTest, PostIncrPostDecr) EXPECT_EQ(*post_cend--, 2); } +#if !RAD_DBG +TEST(ListTest, SelfSplice) +{ + rad::List list; + EXPECT_TRUE(list.AssignRange(std::initializer_list{ 1, 2, 3 }).IsOk()); + + list.SpliceAll(list.end(), list); + ListEqual(list, { 1, 2, 3 }); + list.SpliceAll(++list.begin(), list); + ListEqual(list, { 1, 2, 3 }); + + list.SpliceOne(list.end(), list, list.begin()); + ListEqual(list, { 1, 2, 3 }); + list.SpliceOne(list.begin(), list, ++list.begin()); + ListEqual(list, { 1, 2, 3 }); + + list.SpliceSome(list.end(), list, list.begin(), list.end()); + ListEqual(list, { 1, 2, 3 }); + list.SpliceSome(list.begin(), list, list.begin(), ++list.begin()); + ListEqual(list, { 1, 2, 3 }); +} +#endif + TEST(ListTest, SpliceSomeEmpties) { std::array arr = { 101, 203, 304 }; @@ -1432,9 +1522,9 @@ TEST(ListTest, PrependRange) chars.PrependRange(radtest::TestInputStringLiteralRange("abc")).IsOk()); ListEqual(chars, { 'a', 'b', 'c', 'x', 'y', 'z' }); - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); - rad::List> list( + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); + rad::List> list( alloc); 
EXPECT_TRUE(list.AssignRange(std::initializer_list{ 1, 2, 3 }).IsOk()); @@ -1468,9 +1558,9 @@ TEST(ListTest, AppendRange) chars.AppendRange(radtest::TestInputStringLiteralRange("abc")).IsOk()); ListEqual(chars, { 'x', 'y', 'z', 'a', 'b', 'c' }); - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); - rad::List> list( + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); + rad::List> list( alloc); EXPECT_TRUE(list.AssignRange(std::initializer_list{ 1, 2, 3 }).IsOk()); @@ -1521,9 +1611,9 @@ TEST(ListTest, InsertRange) ListEqual(chars, { 'x', 'a', 'b', 'c', 'y', 'z' }); } { - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); - rad::List> list( + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); + rad::List> list( alloc); EXPECT_TRUE( @@ -1566,9 +1656,9 @@ TEST(ListTest, InsertSome) ListEqual(dest, { 0, 100, 101, 1, 2, 3 }); } { - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); - rad::List> list( + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); + rad::List> list( alloc); EXPECT_TRUE( @@ -1609,9 +1699,9 @@ TEST(ListTest, InsertInitializerList) ListEqual(dest, { 0, 100, 101, 1, 2, 3 }); } { - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); - rad::List> list( + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); + rad::List> list( alloc); EXPECT_TRUE( @@ -1652,9 +1742,9 @@ TEST(ListTest, InsertCount) ListEqual(dest, { 0, 100, 100, 1, 2, 3 }); } { - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); - rad::List> list( + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); + rad::List> list( alloc); EXPECT_TRUE( @@ -1689,9 +1779,9 @@ TEST(ListTest, Clone) ListEqual(li2.Ok(), { 1, 2, 3 }); } { - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); - rad::List> list( + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); + rad::List> list( alloc); EXPECT_TRUE( @@ -2197,6 
+2287,51 @@ TEST(ListTest, FrontBack) } } +TEST(ListTest, Swap) +{ + { + rad::List list1; + EXPECT_TRUE( + list1.AssignRange(std::initializer_list{ 1, 2, 3, 4, 5 }) + .IsOk()); + + rad::List list2; + EXPECT_TRUE( + list2.AssignRange(std::initializer_list{ 11, 12, 13, 14, 15 }) + .IsOk()); + + list1.Swap(list2); + ListEqual(list1, { 11, 12, 13, 14, 15 }); + ListEqual(list2, { 1, 2, 3, 4, 5 }); + } + { + rad::List list1; + EXPECT_TRUE( + list1.AssignRange(std::initializer_list{ 1, 2, 3, 4, 5 }) + .IsOk()); + + rad::List list2; + + list1.Swap(list2); + ListEqual(list1, {}); + ListEqual(list2, { 1, 2, 3, 4, 5 }); + } + { + rad::List list1; + EXPECT_TRUE( + list1.AssignRange(std::initializer_list{ 1, 2, 3, 4, 5 }) + .IsOk()); + + list1.Swap(list1); + ListEqual(list1, { 1, 2, 3, 4, 5 }); + } + { + rad::List list1; + list1.Swap(list1); + ListEqual(list1, {}); + } +} + TEST(ListTest, ReverseIterators) { { @@ -2237,3 +2372,109 @@ TEST(ListTest, ReverseIterators) ListEqual(list2, { 5, 4, 3, 2, 1 }); } } + +#ifdef RAD_GCC_VERSION +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wmultichar" +#endif +TEST(ListTest, TroubleAllocators) +{ + { + using StickyList = rad::List; + StickyList list1(radtest::StickyTaggedAllocator{ 'tst1' }); + EXPECT_TRUE(list1.PushBack(1).IsOk()); + + StickyList list2(std::move(list1)); + EXPECT_TRUE(list1.Empty()); + EXPECT_EQ(list1.GetAllocator().m_tag, uint32_t('tst1')); + EXPECT_EQ(list2.ExpensiveSize(), 1u); + EXPECT_EQ(list2.GetAllocator().m_tag, uint32_t('tst1')); + + // These all static_assert, as they should + // list1 = std::move(list2); + // list1.Swap(list2); + // list1.SpliceAll(list1.end(), list2); + // list1.Clone(); + } + { + using StickyDefaultList = + rad::List; + StickyDefaultList list1( + radtest::StickyDefaultTaggedAllocator{ 'tst1' }); + EXPECT_TRUE(list1.PushBack(1).IsOk()); + + StickyDefaultList list2(std::move(list1)); + EXPECT_TRUE(list1.Empty()); + EXPECT_EQ(list1.GetAllocator().m_tag, uint32_t('tst1')); + 
EXPECT_EQ(list2.ExpensiveSize(), 1u); + EXPECT_EQ(list2.GetAllocator().m_tag, uint32_t('tst1')); + + auto expected_list3 = list2.Clone(); + EXPECT_EQ(list2.ExpensiveSize(), 1u); + EXPECT_EQ(expected_list3.Ok().ExpensiveSize(), 1u); + EXPECT_EQ(expected_list3.Ok().GetAllocator().m_tag, uint32_t(1234)); + + // These all static_assert, as they should + // list1 = std::move(list2); + // list1.Swap(list2); + // list1.SpliceAll(list1.end(), list2); + } + { + using MovingList = rad::List; + MovingList list1(radtest::MovingTaggedAllocator{ 'abcd' }); + EXPECT_TRUE(list1.PushBack(1).IsOk()); + + MovingList list2(std::move(list1)); + EXPECT_TRUE(list1.Empty()); + EXPECT_EQ(list2.ExpensiveSize(), 1u); + + auto expected_list3 = list2.Clone(); + EXPECT_EQ(list2.ExpensiveSize(), 1u); + EXPECT_EQ(expected_list3.Ok().ExpensiveSize(), 1u); + + MovingList list4(radtest::MovingTaggedAllocator{ 'wxyz' }); + EXPECT_TRUE(list1.PushBack(99).IsOk()); + list1.Swap(list4); + EXPECT_TRUE(list1.Empty()); + EXPECT_EQ(list4.ExpensiveSize(), 1u); + EXPECT_EQ(list1.GetAllocator().m_tag, uint32_t('wxyz')); + EXPECT_EQ(list4.GetAllocator().m_tag, uint32_t('abcd')); + + list1 = std::move(list4); + EXPECT_TRUE(list4.Empty()); + EXPECT_EQ(list1.ExpensiveSize(), 1u); + EXPECT_EQ(list1.GetAllocator().m_tag, uint32_t('abcd')); + EXPECT_EQ(list4.GetAllocator().m_tag, uint32_t('abcd')); + + // This static_asserts, as it should + // list1.SpliceAll(list1.end(), list2); + } + { + using TypedList = rad::List; + TypedList list1; + EXPECT_TRUE(list1.PushBack(1).IsOk()); + + TypedList list2(std::move(list1)); + EXPECT_TRUE(list1.Empty()); + EXPECT_EQ(list2.ExpensiveSize(), 1u); + + auto expected_list3 = list2.Clone(); + EXPECT_EQ(list2.ExpensiveSize(), 1u); + EXPECT_EQ(expected_list3.Ok().ExpensiveSize(), 1u); + + list1 = std::move(list2); + EXPECT_TRUE(list2.Empty()); + EXPECT_EQ(list1.ExpensiveSize(), 1u); + + list1.Swap(list2); + EXPECT_TRUE(list1.Empty()); + EXPECT_EQ(list2.ExpensiveSize(), 1u); + + 
list1.SpliceAll(list1.end(), list2); + EXPECT_TRUE(list2.Empty()); + EXPECT_EQ(list1.ExpensiveSize(), 1u); + } +} +#ifdef RAD_GCC_VERSION +#pragma GCC diagnostic pop +#endif diff --git a/test/test_SharedPtr.cpp b/test/test_SharedPtr.cpp index 82f4084..226d7db 100644 --- a/test/test_SharedPtr.cpp +++ b/test/test_SharedPtr.cpp @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#define RAD_DEFAULT_ALLOCATOR radtest::Allocator +#define RAD_DEFAULT_ALLOCATOR radtest::Mallocator #include "gtest/gtest.h" #include "test/TestAlloc.h" @@ -24,8 +24,8 @@ namespace sptestobjs { // clang-format off using NoThrowAllocSp = rad::SharedPtr; -using NoThrowObjBlock = rad::detail::PtrBlock>; -using ThrowObjBlock = rad::detail::PtrBlock>; +using NoThrowObjBlock = rad::detail::PtrBlock; +using ThrowObjBlock = rad::detail::PtrBlock; using NoThrowPair = NoThrowObjBlock::PairType; using ThrowPair = ThrowObjBlock::PairType; @@ -41,15 +41,15 @@ RAD_S_ASSERT(!noexcept(ThrowPair(rad::DeclVal()))); RAD_S_ASSERT(noexcept(NoThrowObjBlock(NoThrowObjBlock::AllocatorType()))); RAD_S_ASSERT(!noexcept(ThrowObjBlock(ThrowObjBlock::AllocatorType()))); -// allocate shared non-throwing allocator required -RAD_S_ASSERT(noexcept(rad::AllocateShared(radtest::Allocator(), 0))); +// allocate shared noexcept +RAD_S_ASSERT(!noexcept(rad::AllocateShared(radtest::Mallocator(), 0))); // allocate shared throwing constructor -RAD_S_ASSERT(!noexcept(rad::AllocateShared( - radtest::Allocator()))); +RAD_S_ASSERT(!noexcept( + rad::AllocateShared(radtest::Mallocator()))); // make shared noexcept -RAD_S_ASSERT(noexcept(rad::MakeShared(0))); +RAD_S_ASSERT(!noexcept(rad::MakeShared(0))); RAD_S_ASSERT(!noexcept(rad::MakeShared())); class Base @@ -97,14 +97,13 @@ namespace sptestobjs::NoThrowObjBlock* AllocTestBlock(int val) { typename sptestobjs::NoThrowObjBlock::AllocatorType alloc; - auto block = alloc.Alloc(1); - new (block) 
sptestobjs::NoThrowObjBlock(alloc, val); - return block; + auto block = alloc.AllocBytes(sizeof(sptestobjs::NoThrowObjBlock)); + return new (block) sptestobjs::NoThrowObjBlock(alloc, val); } void FreeTestBlock(sptestobjs::NoThrowObjBlock* block) { - block->Allocator().Free(block); + block->Allocator().FreeBytes(block, sizeof(sptestobjs::NoThrowObjBlock)); } } // namespace @@ -201,13 +200,13 @@ TEST(TestSharedPtr, RefCountLockWeakFailExchange) TEST(TestSharedPtr, PtrBlockCtor) { - using PtrBlock = rad::detail::PtrBlock>; + using PtrBlock = rad::detail::PtrBlock; PtrBlock::AllocatorType alloc; PtrBlock block(alloc, 2); EXPECT_EQ(block.Value(), 2); using StatefulPtrBlock = - rad::detail::PtrBlock>; + rad::detail::PtrBlock; StatefulPtrBlock::AllocatorType statefulAlloc; StatefulPtrBlock statefulBlock(statefulAlloc, 4); RAD_S_ASSERT(sizeof(statefulBlock) > @@ -252,8 +251,8 @@ TEST(TestSharedPtr, LockWeak) TEST(TestSharedPtr, ReleaseDestruct) { - using PtrBlock = rad::detail::PtrBlock>; + using PtrBlock = + rad::detail::PtrBlock; PtrBlock::AllocatorType alloc; PtrBlock block(alloc); EXPECT_EQ(DestructCounter::counter, 0); @@ -267,16 +266,16 @@ TEST(TestSharedPtr, ReleaseDestruct) TEST(TestSharedPtr, ReleaseFree) { using PtrBlock = - rad::detail::PtrBlock>; + rad::detail::PtrBlock; PtrBlock::AllocatorType alloc; alloc.ResetCounts(); - PtrBlock* block = alloc.Alloc(1); - new (block) PtrBlock(alloc); + void* mem = alloc.AllocBytes(sizeof(PtrBlock)); + PtrBlock* block = new (mem) PtrBlock(alloc); block->Release(); EXPECT_EQ(alloc.AllocCount(), 1u); - EXPECT_TRUE(alloc.VerifyCounts()); + alloc.VerifyCounts(); } TEST(TestSharedPtr, Value) @@ -303,7 +302,7 @@ TEST(TestSharedPtr, NullCtor) TEST(TestSharedPtr, AllocateShared) { - radtest::Allocator alloc; + radtest::Mallocator alloc; auto ptr = rad::AllocateShared(alloc, 2); EXPECT_TRUE(ptr); EXPECT_EQ(*ptr, 2); @@ -311,20 +310,20 @@ TEST(TestSharedPtr, AllocateShared) TEST(TestSharedPtr, AllocateSharedFail) { - 
radtest::FailingAllocator alloc; + radtest::FailingAllocator alloc; auto ptr = rad::AllocateShared(alloc, 2); EXPECT_FALSE(ptr); } TEST(TestSharedPtr, AllocateSharedThrows) { - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); EXPECT_THROW(rad::AllocateShared(alloc, 1), std::exception); EXPECT_EQ(alloc.AllocCount(), 1u); - EXPECT_TRUE(alloc.VerifyCounts()); + alloc.VerifyCounts(); } TEST(TestSharedPtr, MakeShared) @@ -486,7 +485,7 @@ TEST(TestSharedPtr, Swap) TEST(TestSharedPtr, SelfCopyAssign) { - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); auto ptr = rad::AllocateShared(alloc, 2); @@ -500,7 +499,7 @@ TEST(TestSharedPtr, SelfCopyAssign) TEST(TestSharedPtr, CopyAssignNoReset) { - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); auto ptr = rad::AllocateShared(alloc, 2); @@ -516,7 +515,7 @@ TEST(TestSharedPtr, CopyAssignNoReset) TEST(TestSharedPtr, CopyAssignReset) { - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); auto ptr = rad::AllocateShared(alloc, 2); @@ -527,12 +526,12 @@ TEST(TestSharedPtr, CopyAssignReset) EXPECT_EQ(*ptr2, 2); EXPECT_EQ(*ptr2, *ptr); - EXPECT_TRUE(alloc.VerifyCounts(2, 1)); + alloc.VerifyCounts(2, 1); } TEST(TestSharedPtr, CopyAssignNull) { - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); auto ptr = rad::AllocateShared(alloc, 2); @@ -544,7 +543,7 @@ TEST(TestSharedPtr, CopyAssignNull) TEST(TestSharedPtr, SelfMoveAssign) { - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); auto ptr = rad::AllocateShared(alloc, 2); @@ -553,12 +552,12 @@ TEST(TestSharedPtr, SelfMoveAssign) EXPECT_TRUE(ptr); EXPECT_EQ(ptr.UseCount(), 1u); - EXPECT_TRUE(alloc.VerifyCounts(1, 0)); + alloc.VerifyCounts(1, 0); } 
TEST(TestSharedPtr, MoveAssignNoReset) { - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); auto ptr = rad::AllocateShared(alloc, 2); @@ -570,12 +569,12 @@ TEST(TestSharedPtr, MoveAssignNoReset) EXPECT_EQ(ptr2.UseCount(), 1u); EXPECT_EQ(ptr2.WeakCount(), 1u); - EXPECT_TRUE(alloc.VerifyCounts(1, 0)); + alloc.VerifyCounts(1, 0); } TEST(TestSharedPtr, MoveAssignReset) { - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); auto ptr = rad::AllocateShared(alloc, 2); @@ -588,7 +587,7 @@ TEST(TestSharedPtr, MoveAssignReset) EXPECT_EQ(ptr2.UseCount(), 1u); EXPECT_EQ(ptr2.WeakCount(), 1u); - EXPECT_TRUE(alloc.VerifyCounts(2, 1)); + alloc.VerifyCounts(2, 1); } TEST(TestSharedPtr, PolymorphicCtor) @@ -599,7 +598,7 @@ TEST(TestSharedPtr, PolymorphicCtor) EXPECT_EQ(static_cast(&d), static_cast(b)); EXPECT_NE(static_cast(b), static_cast(e)); - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); { @@ -616,12 +615,12 @@ TEST(TestSharedPtr, PolymorphicCtor) } EXPECT_EQ(alloc.AllocCount(), 1u); - EXPECT_TRUE(alloc.VerifyCounts()); + alloc.VerifyCounts(); } TEST(TestSharedPtr, PolymorphicAssign) { - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); { @@ -643,12 +642,12 @@ TEST(TestSharedPtr, PolymorphicAssign) } EXPECT_EQ(alloc.AllocCount(), 1u); - EXPECT_TRUE(alloc.VerifyCounts()); + alloc.VerifyCounts(); } TEST(TestSharedPtr, StatefulAllocator) { - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); auto ptr = rad::AllocateShared(alloc, 2); @@ -662,6 +661,17 @@ TEST(TestSharedPtr, StatefulAllocator) EXPECT_EQ(alloc.FreeCount(), 1u); } +TEST(TestSharedPtr, UsesTypedAlloc) +{ + radtest::TypedAllocator alloc; + + auto ptr = rad::AllocateShared(alloc, 2); + EXPECT_TRUE(ptr); + EXPECT_EQ(*ptr, 2); + + 
ptr.Reset(); +} + TEST(TestWeakPtr, ConstructEmpy) { rad::WeakPtr weak; @@ -837,7 +847,7 @@ TEST(TestWeakPtr, PolymorphicCtor) EXPECT_EQ(static_cast(&d), static_cast(b)); EXPECT_NE(static_cast(b), static_cast(e)); - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); { @@ -870,12 +880,12 @@ TEST(TestWeakPtr, PolymorphicCtor) } EXPECT_EQ(alloc.AllocCount(), 1u); - EXPECT_TRUE(alloc.VerifyCounts()); + alloc.VerifyCounts(); } TEST(TestWeakPtr, PolymorphicAssign) { - radtest::StatefulCountingAllocator alloc; + radtest::StatefulCountingAllocator alloc; alloc.ResetCounts(); { @@ -920,7 +930,7 @@ TEST(TestWeakPtr, PolymorphicAssign) } EXPECT_EQ(alloc.AllocCount(), 1u); - EXPECT_TRUE(alloc.VerifyCounts()); + alloc.VerifyCounts(); } TEST(TestAtomicSharedPtr, Construct) diff --git a/test/test_Vector.cpp b/test/test_Vector.cpp index 4f81926..ec9b2da 100644 --- a/test/test_Vector.cpp +++ b/test/test_Vector.cpp @@ -19,7 +19,7 @@ #define RAD_ENABLE_NOTHROW_DTOR_ASSERTIONS 0 #define RAD_ENABLE_NOTHROW_MOVE_ASSERTIONS 0 -#define RAD_DEFAULT_ALLOCATOR radtest::Allocator +#define RAD_DEFAULT_ALLOCATOR radtest::Mallocator #include "gtest/gtest.h" #include "test/TestAlloc.h" @@ -241,8 +241,8 @@ TEST_F(TestVectorIntegral, InlineDefaultConstruct) TEST_F(TestVectorIntegral, AllocatorCopyConstruct) { - radtest::HeapAllocator heap; - radtest::AllocWrapper alloc(heap); + radtest::HeapResource heap; + radtest::ResourceAllocator alloc(heap); rad::Vector vec(alloc); EXPECT_TRUE(vec.Empty()); @@ -251,14 +251,14 @@ TEST_F(TestVectorIntegral, AllocatorCopyConstruct) EXPECT_EQ(heap.allocCount, 0); EXPECT_EQ(heap.freeCount, 0); - EXPECT_EQ(vec.GetAllocator().base, &heap); + EXPECT_EQ(vec.GetAllocator().m_res, &heap); } TEST_F(TestVectorIntegral, AllocatorMoveConstruct) { - using AllocWrap = radtest::AllocWrapper; + using AllocWrap = radtest::ResourceAllocator; - radtest::HeapAllocator heap; + radtest::HeapResource heap; rad::Vector vec(heap); 
EXPECT_TRUE(vec.Empty()); @@ -267,14 +267,14 @@ TEST_F(TestVectorIntegral, AllocatorMoveConstruct) EXPECT_EQ(heap.allocCount, 0); EXPECT_EQ(heap.freeCount, 0); - EXPECT_EQ(vec.GetAllocator().base, &heap); + EXPECT_EQ(vec.GetAllocator().m_res, &heap); } TEST_F(TestVectorIntegral, Reserve) { - using AllocWrap = radtest::AllocWrapper; + using AllocWrap = radtest::ResourceAllocator; - radtest::HeapAllocator heap; + radtest::HeapResource heap; rad::Vector vec(heap); EXPECT_TRUE(vec.Reserve(100).IsOk()); @@ -312,9 +312,9 @@ TEST_F(TestVectorIntegral, Reserve) TEST_F(TestVectorIntegral, InlineReserve) { - using AllocWrap = radtest::AllocWrapper; + using AllocWrap = radtest::ResourceAllocator; - radtest::HeapAllocator heap; + radtest::HeapResource heap; rad::InlineVector vec(heap); EXPECT_TRUE(vec.Reserve(5).IsOk()); @@ -368,9 +368,9 @@ TEST_F(TestVectorIntegral, InlineReserve) TEST_F(TestVectorIntegral, ReserveFail) { - using AllocWrap = radtest::AllocWrapper; + using AllocWrap = radtest::ResourceAllocator; - radtest::HeapAllocator heap; + radtest::HeapResource heap; rad::Vector vec(heap); heap.forceAllocFails = 1; @@ -385,9 +385,9 @@ TEST_F(TestVectorIntegral, ReserveFail) TEST_F(TestVectorIntegral, InlineReserveFail) { - using AllocWrap = radtest::AllocWrapper; + using AllocWrap = radtest::ResourceAllocator; - radtest::HeapAllocator heap; + radtest::HeapResource heap; rad::InlineVector vec(heap); heap.forceAllocFails = 1; @@ -1000,6 +1000,31 @@ TEST_F(TestVectorIntegral, Move) EXPECT_EQ(other.Size(), 3u); EXPECT_EQ(other.Front(), 1); EXPECT_EQ(other.Back(), 3); + +#ifdef RAD_GCC_VERSION +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wself-move" +#endif +#ifdef RAD_CLANG_VERSION +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wself-move" +#endif + + // self move assign + other = std::move(other); + EXPECT_EQ(other.Size(), 3u); + EXPECT_EQ(other.Front(), 1); + EXPECT_EQ(other.Back(), 3); + + vec = std::move(vec); + EXPECT_EQ(vec.Size(), 
0u); + +#ifdef RAD_CLANG_VERSION +#pragma clang diagnostic pop +#endif +#ifdef RAD_GCC_VERSION +#pragma GCC diagnostic pop +#endif } TEST_F(TestVectorIntegral, Double) @@ -1155,9 +1180,9 @@ TEST_F(TestVectorIntegral, ResizeSame) TEST_F(TestVectorIntegral, ShrinkToFitNoMemory) { - using AllocWrap = radtest::AllocWrapper; + using AllocWrap = radtest::ResourceAllocator; - radtest::HeapAllocator heap; + radtest::HeapResource heap; rad::Vector vec(heap); EXPECT_TRUE(vec.Assign({ 1, 2, 3 }).IsOk()); @@ -1174,9 +1199,9 @@ TEST_F(TestVectorIntegral, ShrinkToFitNoMemory) TEST_F(TestVectorIntegral, InlineShrinkToFitNoMemory) { - using AllocWrap = radtest::AllocWrapper; + using AllocWrap = radtest::ResourceAllocator; - radtest::HeapAllocator heap; + radtest::HeapResource heap; rad::Vector vec(heap); EXPECT_TRUE(vec.Assign({ 1, 2, 3 }).IsOk()); @@ -1193,9 +1218,9 @@ TEST_F(TestVectorIntegral, InlineShrinkToFitNoMemory) TEST_F(TestVectorIntegral, CopyNoMemory) { - using AllocWrap = radtest::AllocWrapper; + using AllocWrap = radtest::ResourceAllocator; - radtest::HeapAllocator heap; + radtest::HeapResource heap; rad::Vector vec(heap); rad::Vector other(heap); @@ -1214,9 +1239,9 @@ TEST_F(TestVectorIntegral, CopyNoMemory) TEST_F(TestVectorIntegral, ResizeNoMemory) { - using AllocWrap = radtest::AllocWrapper; + using AllocWrap = radtest::ResourceAllocator; - radtest::HeapAllocator heap; + radtest::HeapResource heap; rad::Vector vec(heap); heap.forceAllocFails = 1; @@ -1234,9 +1259,9 @@ TEST_F(TestVectorIntegral, ResizeNoMemory) TEST_F(TestVectorIntegral, AssignNoMemory) { - using AllocWrap = radtest::AllocWrapper; + using AllocWrap = radtest::ResourceAllocator; - radtest::HeapAllocator heap; + radtest::HeapResource heap; rad::Vector vec(heap); heap.forceAllocFails = 1; @@ -1254,9 +1279,9 @@ TEST_F(TestVectorIntegral, AssignNoMemory) TEST_F(TestVectorIntegral, EmplaceBackNoMemory) { - using AllocWrap = radtest::AllocWrapper; + using AllocWrap = radtest::ResourceAllocator; - 
radtest::HeapAllocator heap; + radtest::HeapResource heap; rad::Vector vec(heap); heap.forceAllocFails = 1; @@ -1463,9 +1488,8 @@ TYPED_TEST_P(NonTrivialStruct, Resize) EXPECT_EQ(g_stats.CopyAssignCount, 0); EXPECT_EQ(g_stats.MoveAssignCount, 0); - auto allocator = - radtest::OOMAllocator(TypeParam::isNoThrow ? 1 : 3); - rad::Vector> fail(allocator); + auto allocator = radtest::OOMAllocator(TypeParam::isNoThrow ? 1 : 3); + rad::Vector fail(allocator); EXPECT_TRUE(fail.Resize(5, value).IsOk()); EXPECT_EQ(fail.Resize(10, value), rad::Error::NoMemory); @@ -1513,8 +1537,8 @@ TYPED_TEST_P(NonTrivialStruct, Assign) EXPECT_EQ(g_stats.CopyAssignCount, 0); EXPECT_EQ(g_stats.MoveAssignCount, 0); - rad::Vector> fail( - radtest::OOMAllocator(0)); + rad::Vector fail( + radtest::OOMAllocator(0)); EXPECT_EQ(fail.Assign(10, value), rad::Error::NoMemory); } @@ -1595,8 +1619,8 @@ TYPED_TEST_P(NonTrivialStruct, AssignSpan) EXPECT_EQ(g_stats.CopyAssignCount, 0); EXPECT_EQ(g_stats.MoveAssignCount, 0); - rad::Vector> fail( - radtest::OOMAllocator(0)); + rad::Vector fail( + radtest::OOMAllocator(0)); EXPECT_EQ(fail.Assign(spanVec.ToSpan()), rad::Error::NoMemory); } @@ -1724,12 +1748,12 @@ TYPED_TEST_P(NonTrivialStruct, Copy) EXPECT_EQ(g_stats.CopyAssignCount, 0); EXPECT_EQ(g_stats.MoveAssignCount, 0); - rad::Vector> good( - radtest::OOMAllocator(10)); + rad::Vector good( + radtest::OOMAllocator(10)); EXPECT_TRUE(good.Assign(10, value).IsOk()); - rad::Vector> fail( - radtest::OOMAllocator(0)); + rad::Vector fail( + radtest::OOMAllocator(0)); EXPECT_EQ(good.Copy(fail), rad::Error::NoMemory); } @@ -2373,3 +2397,255 @@ TEST_F(TestVectorStrongGuarantee, Copy) EXPECT_EQ(g_stats.CopyAssignCount, 0); EXPECT_EQ(g_stats.MoveAssignCount, 0); } + +#ifdef RAD_GCC_VERSION +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wmultichar" +#endif +TEST(VectorTest, TroubleAllocators) +{ + { + using StickyVec = rad::Vector; + StickyVec vec1(radtest::StickyTaggedAllocator{ 'tst1' }); + 
EXPECT_TRUE(vec1.PushBack(1).IsOk()); + + StickyVec vec2(rad::Move(vec1)); + EXPECT_TRUE(vec1.Empty()); + EXPECT_EQ(vec2.Size(), 1u); + + StickyVec vec3(radtest::StickyTaggedAllocator{ 'tst3' }); + EXPECT_TRUE(vec2.Copy(vec3).IsOk()); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(vec2.GetAllocator().m_tag, uint32_t('tst1')); + EXPECT_EQ(vec3.Size(), 1u); + EXPECT_EQ(vec3.GetAllocator().m_tag, uint32_t('tst3')); + + StickyVec vec3a(radtest::StickyTaggedAllocator{ 'tst4' }); + ASSERT_TRUE(vec3a.PushBack(42).IsOk()); + ASSERT_TRUE(vec3a.PushBack(99).IsOk()); + EXPECT_TRUE(vec2.Copy(vec3a).IsOk()); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(vec2.GetAllocator().m_tag, uint32_t('tst1')); + EXPECT_EQ(vec3a.Size(), 1u); + EXPECT_EQ(vec3a.GetAllocator().m_tag, uint32_t('tst4')); + + // These all static_assert, as they should + // vec1 = std::move(vec2); + // vec1.Swap(vec2); + // vec1.Move(vec2); + // vec1.Clone(); + } + { + using StickyVecNt = + rad::Vector; + StickyVecNt vec1(radtest::StickyTaggedAllocator{ 'tst1' }); + EXPECT_TRUE(vec1.PushBack(ThrowingVecTester(1)).IsOk()); + + StickyVecNt vec2(rad::Move(vec1)); + EXPECT_TRUE(vec1.Empty()); + EXPECT_EQ(vec2.Size(), 1u); + + StickyVecNt vec3(radtest::StickyTaggedAllocator{ 'tst3' }); + EXPECT_TRUE(vec2.Copy(vec3).IsOk()); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(vec2.GetAllocator().m_tag, uint32_t('tst1')); + EXPECT_EQ(vec3.Size(), 1u); + EXPECT_EQ(vec3.GetAllocator().m_tag, uint32_t('tst3')); + + StickyVecNt vec3a(radtest::StickyTaggedAllocator{ 'tst4' }); + ASSERT_TRUE(vec3a.PushBack(ThrowingVecTester(42)).IsOk()); + ASSERT_TRUE(vec3a.PushBack(ThrowingVecTester(99)).IsOk()); + EXPECT_TRUE(vec2.Copy(vec3a).IsOk()); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(vec2.GetAllocator().m_tag, uint32_t('tst1')); + EXPECT_EQ(vec3a.Size(), 1u); + EXPECT_EQ(vec3a.GetAllocator().m_tag, uint32_t('tst4')); + + // These all static_assert, as they should + // vec1 = std::move(vec2); + // vec1.Swap(vec2); + // vec1.Move(vec2); + // 
vec1.Clone(); + } + { + using StickyDefaultVec = + rad::Vector; + StickyDefaultVec vec1(radtest::StickyDefaultTaggedAllocator{ 'tst1' }); + EXPECT_TRUE(vec1.PushBack(1).IsOk()); + + StickyDefaultVec vec2(std::move(vec1)); + EXPECT_TRUE(vec1.Empty()); + EXPECT_EQ(vec1.GetAllocator().m_tag, uint32_t('tst1')); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(vec2.GetAllocator().m_tag, uint32_t('tst1')); + + StickyDefaultVec vec3(radtest::StickyDefaultTaggedAllocator{ 'tst3' }); + EXPECT_TRUE(vec2.Copy(vec3).IsOk()); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(vec2.GetAllocator().m_tag, uint32_t('tst1')); + EXPECT_EQ(vec3.Size(), 1u); + EXPECT_EQ(vec3.GetAllocator().m_tag, uint32_t('tst3')); + + StickyDefaultVec vec3a(radtest::StickyDefaultTaggedAllocator{ 'tst4' }); + ASSERT_TRUE(vec3a.PushBack(42).IsOk()); + ASSERT_TRUE(vec3a.PushBack(99).IsOk()); + EXPECT_TRUE(vec2.Copy(vec3a).IsOk()); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(vec2.GetAllocator().m_tag, uint32_t('tst1')); + EXPECT_EQ(vec3a.Size(), 1u); + EXPECT_EQ(vec3a.GetAllocator().m_tag, uint32_t('tst4')); + + auto expected_vec4 = vec2.Clone(); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(expected_vec4.Ok().Size(), 1u); + EXPECT_EQ(expected_vec4.Ok().GetAllocator().m_tag, uint32_t(1234)); + + // These all static_assert, as they should + // vec1 = std::move(vec2); + // vec1.Swap(vec2); + // vec1.Move(vec2); + } + + { + using MovingVec = rad::Vector; + MovingVec vec1(radtest::MovingTaggedAllocator{ 'abcd' }); + EXPECT_TRUE(vec1.PushBack(1).IsOk()); + + MovingVec vec2(rad::Move(vec1)); + EXPECT_TRUE(vec1.Empty()); + EXPECT_EQ(vec2.Size(), 1u); + + MovingVec vec3(radtest::MovingTaggedAllocator{ 'tst3' }); + EXPECT_TRUE(vec2.Copy(vec3).IsOk()); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(vec2.GetAllocator().m_tag, uint32_t('abcd')); + EXPECT_EQ(vec3.Size(), 1u); + EXPECT_EQ(vec3.GetAllocator().m_tag, uint32_t('abcd')); + + MovingVec vec3a(radtest::MovingTaggedAllocator{ 'tst4' }); + ASSERT_TRUE(vec3a.PushBack(42).IsOk()); + 
ASSERT_TRUE(vec3a.PushBack(99).IsOk()); + EXPECT_TRUE(vec2.Copy(vec3a).IsOk()); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(vec2.GetAllocator().m_tag, uint32_t('abcd')); + EXPECT_EQ(vec3a.Size(), 1u); + EXPECT_EQ(vec3a.GetAllocator().m_tag, uint32_t('abcd')); + + MovingVec vec4(radtest::MovingTaggedAllocator{ 'wxyz' }); + EXPECT_TRUE(vec1.PushBack(99).IsOk()); + vec1.Swap(vec4); + EXPECT_TRUE(vec1.Empty()); + EXPECT_EQ(vec4.Size(), 1u); + EXPECT_EQ(vec1.GetAllocator().m_tag, uint32_t('wxyz')); + EXPECT_EQ(vec4.GetAllocator().m_tag, uint32_t('abcd')); + + vec1 = rad::Move(vec4); + EXPECT_TRUE(vec4.Empty()); + EXPECT_EQ(vec1.Size(), 1u); + EXPECT_EQ(vec1.GetAllocator().m_tag, uint32_t('abcd')); + EXPECT_EQ(vec4.GetAllocator().m_tag, uint32_t('abcd')); + + MovingVec vec5(radtest::MovingTaggedAllocator{ 'crwd' }); + vec1.Move(vec5); + EXPECT_TRUE(vec1.Empty()); + EXPECT_EQ(vec5.Size(), 1u); + EXPECT_EQ(vec1.GetAllocator().m_tag, uint32_t('abcd')); + EXPECT_EQ(vec5.GetAllocator().m_tag, uint32_t('abcd')); + } + { + using MovingVecNt = + rad::Vector; + MovingVecNt vec1(radtest::MovingTaggedAllocator{ 'abcd' }); + EXPECT_TRUE(vec1.PushBack(ThrowingVecTester(1)).IsOk()); + + MovingVecNt vec2(rad::Move(vec1)); + EXPECT_TRUE(vec1.Empty()); + EXPECT_EQ(vec2.Size(), 1u); + + MovingVecNt vec3(radtest::MovingTaggedAllocator{ 'tst3' }); + EXPECT_TRUE(vec2.Copy(vec3).IsOk()); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(vec2.GetAllocator().m_tag, uint32_t('abcd')); + EXPECT_EQ(vec3.Size(), 1u); + EXPECT_EQ(vec3.GetAllocator().m_tag, uint32_t('abcd')); + + MovingVecNt vec3a(radtest::MovingTaggedAllocator{ 'tst4' }); + ASSERT_TRUE(vec3a.PushBack(ThrowingVecTester(42)).IsOk()); + ASSERT_TRUE(vec3a.PushBack(ThrowingVecTester(99)).IsOk()); + EXPECT_TRUE(vec2.Copy(vec3a).IsOk()); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(vec2.GetAllocator().m_tag, uint32_t('abcd')); + EXPECT_EQ(vec3a.Size(), 1u); + EXPECT_EQ(vec3a.GetAllocator().m_tag, uint32_t('abcd')); + + MovingVecNt 
vec4(radtest::MovingTaggedAllocator{ 'wxyz' }); + EXPECT_TRUE(vec1.PushBack(ThrowingVecTester(99)).IsOk()); + vec1.Swap(vec4); + EXPECT_TRUE(vec1.Empty()); + EXPECT_EQ(vec4.Size(), 1u); + EXPECT_EQ(vec1.GetAllocator().m_tag, uint32_t('wxyz')); + EXPECT_EQ(vec4.GetAllocator().m_tag, uint32_t('abcd')); + + vec1 = rad::Move(vec4); + EXPECT_TRUE(vec4.Empty()); + EXPECT_EQ(vec1.Size(), 1u); + EXPECT_EQ(vec1.GetAllocator().m_tag, uint32_t('abcd')); + EXPECT_EQ(vec4.GetAllocator().m_tag, uint32_t('abcd')); + + MovingVecNt vec5(radtest::MovingTaggedAllocator{ 'crwd' }); + vec1.Move(vec5); + EXPECT_TRUE(vec1.Empty()); + EXPECT_EQ(vec5.Size(), 1u); + EXPECT_EQ(vec1.GetAllocator().m_tag, uint32_t('abcd')); + EXPECT_EQ(vec5.GetAllocator().m_tag, uint32_t('abcd')); + } + + { + using TypedVec = rad::Vector; + TypedVec vec1; + EXPECT_TRUE(vec1.PushBack(1).IsOk()); + + TypedVec vec2(rad::Move(vec1)); + EXPECT_TRUE(vec1.Empty()); + EXPECT_EQ(vec2.Size(), 1u); + + TypedVec vec3; + EXPECT_TRUE(vec2.Copy(vec3).IsOk()); + EXPECT_EQ(vec2.Size(), 1u); + EXPECT_EQ(vec3.Size(), 1u); + + vec1 = rad::Move(vec2); + EXPECT_TRUE(vec2.Empty()); + EXPECT_EQ(vec1.Size(), 1u); + + vec1.Swap(vec2); + EXPECT_TRUE(vec1.Empty()); + EXPECT_EQ(vec2.Size(), 1u); + + TypedVec vec4; + vec2.Move(vec4); + EXPECT_TRUE(vec2.Empty()); + EXPECT_EQ(vec4.Size(), 1u); + } + + { + int value = 42; + rad::Vector good( + radtest::MoveOOMAllocator(1, 1234)); // you get one good allocation + EXPECT_TRUE(good.Assign(10, value).IsOk()); + + rad::Vector fail( + radtest::MoveOOMAllocator(99, 5678)); // not ever used + EXPECT_EQ(good.Copy(fail), rad::Error::NoMemory); + EXPECT_EQ(fail.GetAllocator().m_id, 5678); + + rad::Vector pass( + radtest::MoveOOMAllocator(99, 5678)); + EXPECT_TRUE(pass.Assign(10, value).IsOk()); + EXPECT_TRUE(pass.Copy(good).IsOk()); + EXPECT_EQ(good.GetAllocator().m_id, 5678); + } +} +#ifdef RAD_GCC_VERSION +#pragma GCC diagnostic pop +#endif From 2d14b0b0e01af40369198744bf424e3ac97ad734 Mon Sep 17 
00:00:00 2001 From: Ben Craig Date: Wed, 15 Jan 2025 16:26:44 -0500 Subject: [PATCH 2/3] Disable multichar warning --- default_copts.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/default_copts.bzl b/default_copts.bzl index 22e63d9..d6f1ac3 100644 --- a/default_copts.bzl +++ b/default_copts.bzl @@ -44,6 +44,7 @@ RAD_GCC_COPTS = [ "-Wvarargs", "-Wvla", "-Wwrite-strings", + "-Wno-multichar", "-Werror", "-Wpedantic", ] From 9840c667da1a51259aa5efbcd81e8614120d642f Mon Sep 17 00:00:00 2001 From: Ben Craig Date: Thu, 16 Jan 2025 09:51:59 -0500 Subject: [PATCH 3/3] Add version checks to -Wself-move suppressions --- radiant/TotallyRad.h | 4 ++-- test/test_List.cpp | 4 ++-- test/test_Vector.cpp | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/radiant/TotallyRad.h b/radiant/TotallyRad.h index 18bff7f..ab3c1c5 100644 --- a/radiant/TotallyRad.h +++ b/radiant/TotallyRad.h @@ -72,12 +72,12 @@ #if defined(__clang__) && __clang__ #define RAD_CLANG_VERSION \ - (__clang_major__ * 10000 + __clang_minor__ + 100 + __clang_patchlevel__) + (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) #endif #if !defined(__clang__) && defined(__GNUC__) && __GNUC__ #define RAD_GCC_VERSION \ - (__GNUC__ * 10000 + __GNUC__MINOR__ + 100 + __GNUC_PATCHLEVEL__) + (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) #endif #if defined(_MSC_VER) && _MSC_VER diff --git a/test/test_List.cpp b/test/test_List.cpp index 6820f81..bed38bf 100644 --- a/test/test_List.cpp +++ b/test/test_List.cpp @@ -449,7 +449,7 @@ TEST(ListTest, MoveAssign) ListEqual(one, { 101 }); ListEqual(move_from_one, { 1, 201 }); } -#ifdef RAD_GCC_VERSION +#if defined(RAD_GCC_VERSION) && RAD_GCC_VERSION >= 130000 #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wself-move" #endif @@ -472,7 +472,7 @@ TEST(ListTest, MoveAssign) #ifdef RAD_CLANG_VERSION #pragma clang diagnostic pop #endif -#ifdef RAD_GCC_VERSION +#if defined(RAD_GCC_VERSION) && RAD_GCC_VERSION >= 130000 
#pragma GCC diagnostic pop #endif } diff --git a/test/test_Vector.cpp b/test/test_Vector.cpp index ec9b2da..8154684 100644 --- a/test/test_Vector.cpp +++ b/test/test_Vector.cpp @@ -1001,7 +1001,7 @@ TEST_F(TestVectorIntegral, Move) EXPECT_EQ(other.Front(), 1); EXPECT_EQ(other.Back(), 3); -#ifdef RAD_GCC_VERSION +#if defined(RAD_GCC_VERSION) && RAD_GCC_VERSION >= 130000 #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wself-move" #endif @@ -1022,7 +1022,7 @@ TEST_F(TestVectorIntegral, Move) #ifdef RAD_CLANG_VERSION #pragma clang diagnostic pop #endif -#ifdef RAD_GCC_VERSION +#if defined(RAD_GCC_VERSION) && RAD_GCC_VERSION >= 130000 #pragma GCC diagnostic pop #endif }