From c99ad2265b8cc4302584545109db435a8fa2f5fb Mon Sep 17 00:00:00 2001
From: Robert Osfield
Date: Tue, 4 Jun 2024 11:49:16 +0100
Subject: [PATCH 01/43] Made Allocator::setBlockSize(..) virtual to allow
 subclasses to override it.

---
 include/vsg/core/Allocator.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h
index 69e067522..caebc6fa2 100644
--- a/include/vsg/core/Allocator.h
+++ b/include/vsg/core/Allocator.h
@@ -115,7 +115,7 @@ namespace vsg
 
         MemoryBlocks* getOrCreateMemoryBlocks(AllocatorAffinity allocatorAffinity, const std::string& name, size_t blockSize, size_t in_alignment = 4);
 
-        void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize);
+        virtual void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize);
 
         mutable std::mutex mutex;

From f791a8ab732af6f5728becc889ac20300a240751 Mon Sep 17 00:00:00 2001
From: Robert Osfield
Date: Tue, 4 Jun 2024 11:49:43 +0100
Subject: [PATCH 02/43] Added allocator type report message

---
 src/vsg/core/Allocator.cpp | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp
index 0e74a3498..1761a4451 100644
--- a/src/vsg/core/Allocator.cpp
+++ b/src/vsg/core/Allocator.cpp
@@ -55,6 +55,7 @@ std::unique_ptr<Allocator>& Allocator::instance()
 void Allocator::report(std::ostream& out) const
 {
     out << "Allocator::report() " << allocatorMemoryBlocks.size() << std::endl;
+    out << "allocatorType = " << allocatorType << std::endl;
     out << "totalAvailableSize = " << totalAvailableSize() << ", totalReservedSize = " << totalReservedSize() << ", totalMemorySize = " << totalMemorySize() << std::endl;
 
     double totalReserved = static_cast<double>(totalReservedSize());

From 7537300db64851d55b130ac6e2b29a003cb7e451 Mon Sep 17 00:00:00 2001
From: Robert Osfield
Date: Tue, 11 Jun 2024 19:00:49 +0100
Subject: [PATCH 03/43] Changed vsg::Allocator to be a pure virtual base class.

Moved block allocation implementation into OriginalBlockAllocator subclass
from vsg::Allocator in prep for introducing the new IntrusiveAllocator.
---
 include/vsg/core/Allocator.h | 155 ++++++++++++++++++++++-------------
 src/vsg/core/Allocator.cpp   |  94 ++++++++++-----------
 2 files changed, 145 insertions(+), 104 deletions(-)

diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h
index caebc6fa2..bfeef17df 100644
--- a/include/vsg/core/Allocator.h
+++ b/include/vsg/core/Allocator.h
@@ -41,94 +41,51 @@ namespace vsg
     class VSG_DECLSPEC Allocator
     {
     public:
-        explicit Allocator(size_t in_default_alignment = 4);
-        explicit Allocator(std::unique_ptr<Allocator> in_nestedAllocator, size_t in_default_alignment = 4);
-
-        virtual ~Allocator();
+        explicit Allocator(size_t in_default_alignment = 4) : default_alignment(in_default_alignment) {}
+        explicit Allocator(std::unique_ptr<Allocator> in_nestedAllocator, size_t in_default_alignment = 4) : default_alignment(in_default_alignment), nestedAllocator(std::move(in_nestedAllocator)) {}
+        virtual ~Allocator() {}
 
         /// Allocator singleton
         static std::unique_ptr<Allocator>& instance();
 
         /// allocate from the pool of memory blocks, or allocate from a new memory block
-        virtual void* allocate(std::size_t size, AllocatorAffinity allocatorAffinity = ALLOCATOR_AFFINITY_OBJECTS);
+        virtual void* allocate(std::size_t size, AllocatorAffinity allocatorAffinity = ALLOCATOR_AFFINITY_OBJECTS) = 0;
 
         /// deallocate, returning data to pool.
-        virtual bool deallocate(void* ptr, std::size_t size);
+        virtual bool deallocate(void* ptr, std::size_t size) = 0;
 
         /// delete any MemoryBlock that are empty
-        virtual size_t deleteEmptyMemoryBlocks();
+        virtual size_t deleteEmptyMemoryBlocks() = 0;
 
         /// return the total available size of allocated MemoryBlocks
-        virtual size_t totalAvailableSize() const;
+        virtual size_t totalAvailableSize() const = 0;
 
         /// return the total reserved size of allocated MemoryBlocks
-        virtual size_t totalReservedSize() const;
+        virtual size_t totalReservedSize() const = 0;
 
         /// return the total memory size of allocated MemoryBlocks
-        virtual size_t totalMemorySize() const;
-
-        /// report stats about blocks of memory allocated.
-        virtual void report(std::ostream& out) const;
+        virtual size_t totalMemorySize() const = 0;
 
         AllocatorType allocatorType = ALLOCATOR_TYPE_VSG_ALLOCATOR; // use MemoryBlocks by default
         int memoryTracking = MEMORY_TRACKING_DEFAULT;
 
         /// set the MemoryTracking member of the vsg::Allocator and all the MemoryBlocks that it manages.
-        void setMemoryTracking(int mt);
-
-        struct MemoryBlock
-        {
-            MemoryBlock(size_t blockSize, int memoryTracking, size_t in_alignment);
-            virtual ~MemoryBlock();
+        virtual void setMemoryTracking(int mt) = 0;
 
-            void* allocate(std::size_t size);
-            bool deallocate(void* ptr, std::size_t size);
-
-            vsg::MemorySlots memorySlots;
-            size_t alignment = 4;
-            size_t block_alignment = 16;
-            uint8_t* memory = nullptr;
-        };
-
-        struct MemoryBlocks
-        {
-            Allocator* parent = nullptr;
-            std::string name;
-            size_t blockSize = 0;
-            size_t alignment = 4;
-            std::map<void*, std::shared_ptr<MemoryBlock>> memoryBlocks;
-            std::shared_ptr<MemoryBlock> latestMemoryBlock;
-
-            MemoryBlocks(Allocator* in_parent, const std::string& in_name, size_t in_blockSize, size_t in_alignment);
-            virtual ~MemoryBlocks();
-
-            void* allocate(std::size_t size);
-            bool deallocate(void* ptr, std::size_t size);
+        virtual void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) = 0;
 
-            size_t deleteEmptyMemoryBlocks();
-            size_t totalAvailableSize() const;
-            size_t totalReservedSize() const;
-            size_t totalMemorySize() const;
-        };
-
-        MemoryBlocks* getMemoryBlocks(AllocatorAffinity allocatorAffinity);
-
-        MemoryBlocks* getOrCreateMemoryBlocks(AllocatorAffinity allocatorAffinity, const std::string& name, size_t blockSize, size_t in_alignment = 4);
-
-        virtual void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize);
+        /// report stats about blocks of memory allocated.
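        /// (illustrative usage, not part of the patch: stats for the singleton allocator
        /// can be printed with vsg::Allocator::instance()->report(std::cout);)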
+ virtual void report(std::ostream& out) const = 0; mutable std::mutex mutex; - size_t default_alignment = 4; - double allocationTime = 0.0; double deallocationTime = 0.0; protected: + // if you are assigning a custom allocator you must retain the old allocator to manage the memory it allocated and needs to delete std::unique_ptr nestedAllocator; - - std::vector> allocatorMemoryBlocks; }; /// allocate memory using vsg::Allocator::instance() if available, otherwise use std::malloc(size) @@ -179,4 +136,88 @@ namespace vsg template using allocator_affinity_physics = allocator_affinity_adapter; + ///// + + class VSG_DECLSPEC OriginalBlockAllocator : public Allocator + { + public: + explicit OriginalBlockAllocator(size_t in_default_alignment = 4); + explicit OriginalBlockAllocator(std::unique_ptr in_nestedAllocator, size_t in_default_alignment = 4); + + virtual ~OriginalBlockAllocator(); + + /// allocate from the pool of memory blocks, or allocate from a new memory block + void* allocate(std::size_t size, AllocatorAffinity allocatorAffinity = ALLOCATOR_AFFINITY_OBJECTS) override; + + /// deallocate, returning data to pool. + bool deallocate(void* ptr, std::size_t size) override; + + /// delete any MemoryBlock that are empty + size_t deleteEmptyMemoryBlocks() override; + + /// return the total available size of allocated MemoryBlocks + size_t totalAvailableSize() const override; + + /// return the total reserved size of allocated MemoryBlocks + size_t totalReservedSize() const override; + + /// return the total memory size of allocated MemoryBlocks + size_t totalMemorySize() const override; + + /// report stats about blocks of memory allocated. + void report(std::ostream& out) const override; + + /// set the MemoryTracking member of the vsg::Allocator and all the MemoryBlocks that it manages. 
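        // Note (illustrative, not part of the patch): memoryTracking is treated as a bit
        // mask by the implementation, e.g. setting the MEMORY_TRACKING_REPORT_ACTIONS bit
        // makes the allocator log new blocks, allocations and deletions via vsg::info().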
+ void setMemoryTracking(int mt) override; + + void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) override; + + struct MemoryBlock + { + MemoryBlock(size_t blockSize, int memoryTracking, size_t in_alignment); + virtual ~MemoryBlock(); + + void* allocate(std::size_t size); + bool deallocate(void* ptr, std::size_t size); + + vsg::MemorySlots memorySlots; + size_t alignment = 4; + size_t block_alignment = 16; + uint8_t* memory = nullptr; + }; + + struct MemoryBlocks + { + OriginalBlockAllocator* parent = nullptr; + std::string name; + size_t blockSize = 0; + size_t alignment = 4; + std::map> memoryBlocks; + std::shared_ptr latestMemoryBlock; + + MemoryBlocks(OriginalBlockAllocator* in_parent, const std::string& in_name, size_t in_blockSize, size_t in_alignment); + virtual ~MemoryBlocks(); + + void* allocate(std::size_t size); + bool deallocate(void* ptr, std::size_t size); + + size_t deleteEmptyMemoryBlocks(); + size_t totalAvailableSize() const; + size_t totalReservedSize() const; + size_t totalMemorySize() const; + }; + + MemoryBlocks* getMemoryBlocks(AllocatorAffinity allocatorAffinity); + + MemoryBlocks* getOrCreateMemoryBlocks(AllocatorAffinity allocatorAffinity, const std::string& name, size_t blockSize, size_t in_alignment = 4); + + double allocationTime = 0.0; + double deallocationTime = 0.0; + + protected: + + std::vector> allocatorMemoryBlocks; + }; + + } // namespace vsg diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index 1761a4451..7446f630f 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -20,12 +20,18 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI using namespace vsg; +std::unique_ptr& Allocator::instance() +{ + static std::unique_ptr s_allocator(new OriginalBlockAllocator()); + return s_allocator; +} + //////////////////////////////////////////////////////////////////////////////////////////////////// // -// vsg::Allocator +// vsg::OriginalBlockAllocator // -Allocator::Allocator(size_t in_default_alignment) : - default_alignment(in_default_alignment) +OriginalBlockAllocator::OriginalBlockAllocator(size_t in_default_alignment) : + Allocator(in_default_alignment) { allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); @@ -36,25 +42,19 @@ Allocator::Allocator(size_t in_default_alignment) : allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "MemoryBlocks_PHYSICS", size_t(Megabyte), 16)); } -Allocator::Allocator(std::unique_ptr in_nestedAllocator, size_t in_default_alignment) : - Allocator(in_default_alignment) +OriginalBlockAllocator::OriginalBlockAllocator(std::unique_ptr in_nestedAllocator, size_t in_default_alignment) : + Allocator(std::move(in_nestedAllocator), in_default_alignment) { - nestedAllocator = std::move(in_nestedAllocator); } -Allocator::~Allocator() +OriginalBlockAllocator::~OriginalBlockAllocator() { } -std::unique_ptr& Allocator::instance() -{ - static std::unique_ptr s_allocator(new Allocator()); - return s_allocator; -} -void Allocator::report(std::ostream& out) const +void OriginalBlockAllocator::report(std::ostream& out) const { - out << "Allocator::report() " << allocatorMemoryBlocks.size() << std::endl; + out << "OriginalBlockAllocator::report() " << allocatorMemoryBlocks.size() << std::endl; out << "allocatorType = " << allocatorType << std::endl; out << "totalAvailableSize = " << totalAvailableSize() << ", totalReservedSize = " << totalReservedSize() << ", totalMemorySize = " << totalMemorySize() << std::endl; 
double totalReserved = static_cast(totalReservedSize()); @@ -89,7 +89,7 @@ void Allocator::report(std::ostream& out) const } } -void* Allocator::allocate(std::size_t size, AllocatorAffinity allocatorAffinity) +void* OriginalBlockAllocator::allocate(std::size_t size, AllocatorAffinity allocatorAffinity) { std::scoped_lock lock(mutex); @@ -98,7 +98,7 @@ void* Allocator::allocate(std::size_t size, AllocatorAffinity allocatorAffinity) { if (memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) { - info("Allocator::allocate(", size, ", ", allocatorAffinity, ") out of bounds allocating new MemoryBlock"); + info("OriginalBlockAllocator::allocate(", size, ", ", allocatorAffinity, ") out of bounds allocating new MemoryBlock"); } auto name = make_string("MemoryBlocks_", allocatorAffinity); @@ -122,15 +122,15 @@ void* Allocator::allocate(std::size_t size, AllocatorAffinity allocatorAffinity) } } - void* ptr = Allocator::allocate(size, allocatorAffinity); + void* ptr = OriginalBlockAllocator::allocate(size, allocatorAffinity); if (memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) { - info("Allocator::allocate(", size, ", ", int(allocatorAffinity), ") ptr = ", ptr); + info("OriginalBlockAllocator::allocate(", size, ", ", int(allocatorAffinity), ") ptr = ", ptr); } return ptr; } -bool Allocator::deallocate(void* ptr, std::size_t size) +bool OriginalBlockAllocator::deallocate(void* ptr, std::size_t size) { std::scoped_lock lock(mutex); @@ -165,7 +165,7 @@ bool Allocator::deallocate(void* ptr, std::size_t size) return false; } -size_t Allocator::deleteEmptyMemoryBlocks() +size_t OriginalBlockAllocator::deleteEmptyMemoryBlocks() { std::scoped_lock lock(mutex); @@ -177,7 +177,7 @@ size_t Allocator::deleteEmptyMemoryBlocks() return memoryDeleted; } -size_t Allocator::totalAvailableSize() const +size_t OriginalBlockAllocator::totalAvailableSize() const { std::scoped_lock lock(mutex); @@ -189,7 +189,7 @@ size_t Allocator::totalAvailableSize() const return size; } -size_t Allocator::totalReservedSize() const +size_t OriginalBlockAllocator::totalReservedSize() const { std::scoped_lock lock(mutex); @@ -201,7 +201,7 @@ size_t Allocator::totalReservedSize() const return size; } -size_t Allocator::totalMemorySize() const +size_t OriginalBlockAllocator::totalMemorySize() const { std::scoped_lock lock(mutex); @@ -213,7 +213,7 @@ size_t Allocator::totalMemorySize() const return size; } -Allocator::MemoryBlocks* Allocator::getMemoryBlocks(AllocatorAffinity allocatorAffinity) +OriginalBlockAllocator::MemoryBlocks* OriginalBlockAllocator::getMemoryBlocks(AllocatorAffinity allocatorAffinity) { std::scoped_lock lock(mutex); @@ -221,7 +221,7 @@ Allocator::MemoryBlocks* Allocator::getMemoryBlocks(AllocatorAffinity allocatorA return {}; } -Allocator::MemoryBlocks* Allocator::getOrCreateMemoryBlocks(AllocatorAffinity allocatorAffinity, const std::string& name, size_t blockSize, size_t alignment) +OriginalBlockAllocator::MemoryBlocks* OriginalBlockAllocator::getOrCreateMemoryBlocks(AllocatorAffinity allocatorAffinity, const std::string& name, size_t blockSize, size_t alignment) { std::scoped_lock lock(mutex); @@ -239,7 +239,7 @@ Allocator::MemoryBlocks* Allocator::getOrCreateMemoryBlocks(AllocatorAffinity al return allocatorMemoryBlocks[allocatorAffinity].get(); } -void Allocator::setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) +void OriginalBlockAllocator::setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) { std::scoped_lock lock(mutex); @@ -256,7 +256,7 @@ void 
Allocator::setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSi } } -void Allocator::setMemoryTracking(int mt) +void OriginalBlockAllocator::setMemoryTracking(int mt) { memoryTracking = mt; for (auto& amb : allocatorMemoryBlocks) @@ -273,9 +273,9 @@ void Allocator::setMemoryTracking(int mt) //////////////////////////////////////////////////////////////////////////////////////////////////// // -// vsg::Allocator::MemoryBlock +// vsg::OriginalBlockAllocator::MemoryBlock // -Allocator::MemoryBlock::MemoryBlock(size_t blockSize, int memoryTracking, size_t in_alignment) : +OriginalBlockAllocator::MemoryBlock::MemoryBlock(size_t blockSize, int memoryTracking, size_t in_alignment) : memorySlots(blockSize, memoryTracking), alignment(in_alignment) { @@ -290,7 +290,7 @@ Allocator::MemoryBlock::MemoryBlock(size_t blockSize, int memoryTracking, size_t } } -Allocator::MemoryBlock::~MemoryBlock() +OriginalBlockAllocator::MemoryBlock::~MemoryBlock() { if (memorySlots.memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) { @@ -300,7 +300,7 @@ Allocator::MemoryBlock::~MemoryBlock() operator delete (memory, std::align_val_t{block_alignment}); } -void* Allocator::MemoryBlock::allocate(std::size_t size) +void* OriginalBlockAllocator::MemoryBlock::allocate(std::size_t size) { auto [allocated, offset] = memorySlots.reserve(size, alignment); if (allocated) @@ -309,7 +309,7 @@ void* Allocator::MemoryBlock::allocate(std::size_t size) return nullptr; } -bool Allocator::MemoryBlock::deallocate(void* ptr, std::size_t size) +bool OriginalBlockAllocator::MemoryBlock::deallocate(void* ptr, std::size_t size) { if (ptr >= memory) { @@ -318,7 +318,7 @@ bool Allocator::MemoryBlock::deallocate(void* ptr, std::size_t size) { if (!memorySlots.release(offset, size)) { - warn("Allocator::MemoryBlock::deallocate(", ptr, ") problem - couldn't release"); + warn("OriginalBlockAllocator::MemoryBlock::deallocate(", ptr, ") problem - couldn't release"); } return true; } @@ -328,9 +328,9 @@ bool Allocator::MemoryBlock::deallocate(void* ptr, std::size_t size) //////////////////////////////////////////////////////////////////////////////////////////////////// // -// vsg::Allocator::MemoryBlocks +// vsg::OriginalBlockAllocator::MemoryBlocks // -Allocator::MemoryBlocks::MemoryBlocks(Allocator* in_parent, const std::string& in_name, size_t in_blockSize, size_t in_alignment) : +OriginalBlockAllocator::MemoryBlocks::MemoryBlocks(OriginalBlockAllocator* in_parent, const std::string& in_name, size_t in_blockSize, size_t in_alignment) : parent(in_parent), name(in_name), blockSize(in_blockSize), @@ -338,11 +338,11 @@ Allocator::MemoryBlocks::MemoryBlocks(Allocator* in_parent, const std::string& i { if (parent->memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) { - info("Allocator::MemoryBlocks::MemoryBlocks(", parent, ", ", name, ", ", blockSize, ")"); + info("OriginalBlockAllocator::MemoryBlocks::MemoryBlocks(", parent, ", ", name, ", ", blockSize, ")"); } } -Allocator::MemoryBlocks::~MemoryBlocks() +OriginalBlockAllocator::MemoryBlocks::~MemoryBlocks() { if (parent->memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) { @@ -350,7 +350,7 @@ Allocator::MemoryBlocks::~MemoryBlocks() } } -void* Allocator::MemoryBlocks::allocate(std::size_t size) +void* OriginalBlockAllocator::MemoryBlocks::allocate(std::size_t size) { if (latestMemoryBlock) { @@ -380,13 +380,13 @@ void* Allocator::MemoryBlocks::allocate(std::size_t size) if (parent->memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) { - info("Allocator::MemoryBlocks::allocate(", size, ") 
MemoryBlocks.name = ", name, ", allocated in new MemoryBlock ", parent->memoryTracking); + info("OriginalBlockAllocator::MemoryBlocks::allocate(", size, ") MemoryBlocks.name = ", name, ", allocated in new MemoryBlock ", parent->memoryTracking); } return ptr; } -bool Allocator::MemoryBlocks::deallocate(void* ptr, std::size_t size) +bool OriginalBlockAllocator::MemoryBlocks::deallocate(void* ptr, std::size_t size) { if (memoryBlocks.empty()) return false; @@ -418,7 +418,7 @@ bool Allocator::MemoryBlocks::deallocate(void* ptr, std::size_t size) return false; } -size_t Allocator::MemoryBlocks::deleteEmptyMemoryBlocks() +size_t OriginalBlockAllocator::MemoryBlocks::deleteEmptyMemoryBlocks() { size_t memoryDeleted = 0; if (parent->memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) @@ -448,7 +448,7 @@ size_t Allocator::MemoryBlocks::deleteEmptyMemoryBlocks() return memoryDeleted; } -size_t Allocator::MemoryBlocks::totalAvailableSize() const +size_t OriginalBlockAllocator::MemoryBlocks::totalAvailableSize() const { size_t size = 0; for (auto& value : memoryBlocks) @@ -458,7 +458,7 @@ size_t Allocator::MemoryBlocks::totalAvailableSize() const return size; } -size_t Allocator::MemoryBlocks::totalReservedSize() const +size_t OriginalBlockAllocator::MemoryBlocks::totalReservedSize() const { size_t size = 0; for (auto& value : memoryBlocks) @@ -468,7 +468,7 @@ size_t Allocator::MemoryBlocks::totalReservedSize() const return size; } -size_t Allocator::MemoryBlocks::totalMemorySize() const +size_t OriginalBlockAllocator::MemoryBlocks::totalMemorySize() const { size_t size = 0; for (auto& value : memoryBlocks) @@ -480,14 +480,14 @@ size_t Allocator::MemoryBlocks::totalMemorySize() const //////////////////////////////////////////////////////////////////////////////////////////////////// // -// vsg::allocate and vsg::deallocate convenience functions that map to using the Allocator singleton. +// vsg::allocate and vsg::deallocate convenience functions that map to using the OriginalBlockAllocator singleton. 
// void* vsg::allocate(std::size_t size, AllocatorAffinity allocatorAffinity) { - return Allocator::instance()->allocate(size, allocatorAffinity); + return OriginalBlockAllocator::instance()->allocate(size, allocatorAffinity); } void vsg::deallocate(void* ptr, std::size_t size) { - Allocator::instance()->deallocate(ptr, size); + OriginalBlockAllocator::instance()->deallocate(ptr, size); } From 8b7e631f9c83ba1736b74452c2308c25631ec57e Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Wed, 12 Jun 2024 10:23:13 +0100 Subject: [PATCH 04/43] Moved MemoryBlocks classes into protected scope --- include/vsg/core/Allocator.h | 1 + 1 file changed, 1 insertion(+) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index bfeef17df..23c15c657 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -172,6 +172,7 @@ namespace vsg void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) override; + protected: struct MemoryBlock { MemoryBlock(size_t blockSize, int memoryTracking, size_t in_alignment); From 974e1f756b5aa28cb4449ad1e4fec2e4083570df Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Wed, 12 Jun 2024 14:23:20 +0100 Subject: [PATCH 05/43] Added in the InstrusiveAllocator --- include/vsg/core/Allocator.h | 112 ++++++ src/vsg/core/Allocator.cpp | 702 ++++++++++++++++++++++++++++++++++- 2 files changed, 812 insertions(+), 2 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index 23c15c657..970a00f62 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -220,5 +220,117 @@ namespace vsg std::vector> allocatorMemoryBlocks; }; + class VSG_DECLSPEC IntrusiveAllocator : public Allocator + { + public: + IntrusiveAllocator(std::unique_ptr in_nestedAllocator = {}); + + ~IntrusiveAllocator(); + + void report(std::ostream& out) const override; + + void* allocate(std::size_t size, AllocatorAffinity allocatorAffinity = ALLOCATOR_AFFINITY_OBJECTS) override; + + bool deallocate(void* ptr, std::size_t size) override; + + bool validate() const; + + size_t deleteEmptyMemoryBlocks() override; + size_t totalAvailableSize() const override; + size_t totalReservedSize() const override; + size_t totalMemorySize() const override; + void setMemoryTracking(int mt) override; + void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) override; + + protected: + + struct MemoryBlock + { + MemoryBlock(const std::string& in_name, size_t in_blockSize, size_t in_alignment); + virtual ~MemoryBlock(); + + std::string name; + + void* allocate(std::size_t size); + bool deallocate(void* ptr, std::size_t size); + + void report(std::ostream& out) const; + + // bitfield packing of doubly-linked with status field into a 4 byte word + struct Element + { + + Element(size_t in_index) : + index(in_index) {} + + Element(size_t in_previous, size_t in_next, unsigned int in_status) : + previous(in_previous), + next(in_next), + status(in_status) {} + + union + { + uint32_t index; + + struct + { + unsigned int previous : 15; + unsigned int next : 15; + unsigned int status : 2; + }; + }; + }; + + struct FreeList + { + size_t minimum_size = 0; + size_t maximum_size = 0; + size_t count = 0; + size_t head = 0; + }; + + using Offset = uint16_t; + using value_type = Element; + value_type* memory = nullptr; + value_type* memory_end = nullptr; + size_t capacity = 0; + + size_t alignment = 4; // min aligment is 4. 
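            // Sketch of the slot encoding (illustrative summary, not part of the patch):
            // each 32-bit Element packs { previous : 15, next : 15, status : 2 }, where
            // previous/next are offsets in Elements to the neighbouring slot headers and
            // status marks a slot as allocated (0) or available (1), so a single slot can
            // span at most (1 << 15) - 1 Elements; a free slot additionally stores its
            // previous/next free-list indices in the two Elements after its header.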
+ size_t block_alignment = 16; + size_t blockSize = 0; + + std::vector freeLists; + + bool validate() const; + + bool freeSlotsAvaible(size_t size) const; + + inline bool within(void* ptr) const { return memory <= ptr && ptr < memory_end; } + + }; + + class MemoryBlocks + { + public: + MemoryBlocks(IntrusiveAllocator* in_parent, const std::string& in_name, size_t in_blockSize, size_t in_alignment); + virtual ~MemoryBlocks(); + + IntrusiveAllocator* parent = nullptr; + std::string name; + size_t alignment = 4; + size_t blockSize; + std::vector> memoryBlocks; + std::shared_ptr memoryBlockWithSpace; + + void* allocate(std::size_t size); + void report(std::ostream& out) const; + bool validate() const; + }; + + std::vector> allocatorMemoryBlocks; + std::map> memoryBlocks; + }; + + } // namespace vsg diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index 7446f630f..f1ccb7944 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -484,10 +484,708 @@ size_t OriginalBlockAllocator::MemoryBlocks::totalMemorySize() const // void* vsg::allocate(std::size_t size, AllocatorAffinity allocatorAffinity) { - return OriginalBlockAllocator::instance()->allocate(size, allocatorAffinity); + return Allocator::instance()->allocate(size, allocatorAffinity); } void vsg::deallocate(void* ptr, std::size_t size) { - OriginalBlockAllocator::instance()->deallocate(ptr, size); + Allocator::instance()->deallocate(ptr, size); } + +//////////////////////////////////////////////////////////////////////////////////////////////////// +// +// vsg::InstrusiveAllocator +// + +////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// +// MemoryBlock +// +IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t in_blockSize, size_t in_alignment) : + name(in_name), + alignment(in_alignment), + blockSize(in_blockSize) +{ + // // vsg::debug("IntrusiveAllocator::MemoryBlock::MemoryBlock(", in_blockSize, ", ", in_alignment, ")"); + + alignment = std::max(alignment, sizeof(value_type)); // we need to be a multiple of sizeof(value_type) + block_alignment = std::max(alignment, alignof(std::max_align_t)); + block_alignment = std::max(block_alignment, size_t{16}); + + // round blockSize up to nearest aligned size + blockSize = ((blockSize+alignment-1) / alignment) * alignment; + + //memory = static_cast(operator new (blockSize, std::align_val_t{block_alignment})); + memory = static_cast(operator new (blockSize)); + memory_end = memory + blockSize / sizeof(value_type); + capacity = blockSize / alignment; + + size_t max_slot_size = (1 << 15) - 1; + + // // vsg::debug(" capacity = ", capacity, ", max_slot_size = ", max_slot_size); + + // set up the free tracking to encompass the whole buffer + freeLists.emplace_back(); + FreeList& freeList = freeLists.front(); + freeList.minimum_size = 2 * sizeof(Element); + freeList.maximum_size = (max_slot_size - 1) * sizeof(Element); + freeList.head = 1; // start at position 1 so that position 0 can be used to mark beginning or end of free lists + freeList.count = 0; + + // mark the first element as 0. + memory[0].index = 0; + + size_t previous_position = 0; // 0 marks the beginning of the free list + size_t position = freeList.head; + for(; position < capacity;) + { + size_t next_position = std::min(position + max_slot_size, capacity); + + memory[position] = Element{ (previous_position == 0) ? 
0 : (position - previous_position), next_position - position, 1 }; + memory[position+1].index = previous_position; + memory[position+2].index = (next_position < capacity) ? next_position : 0; + previous_position = position; + position = next_position; + ++freeList.count; + } +} + +IntrusiveAllocator::MemoryBlock::~MemoryBlock() +{ + //operator delete (memory, std::align_val_t{block_alignment}); + operator delete (memory); +} + +bool IntrusiveAllocator::MemoryBlock::freeSlotsAvaible(size_t size) const +{ + for(auto& freeList : freeLists) + { + if (freeList.maximum_size >= size && freeList.count > 0) return true; + } + return false; +} + +void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) +{ + for(auto& freeList : freeLists) + { + // check if freeList has available slots and maximum_size is big enough + if (freeList.count == 0 || size > freeList.maximum_size) continue; + + size_t freePosition = freeList.head; + while (freePosition != 0) + { + auto& slot = memory[freePosition]; + if (slot.status != 1) + { + throw "Warning: allocated slot found in freeList"; + } + + size_t previousFreePosition = memory[freePosition+1].index; + size_t nextFreePosition = memory[freePosition+2].index; + + size_t slotSpace = static_cast(slot.next); + size_t nextPosition = freePosition + slotSpace; + size_t slotSize = sizeof(Element) * (slotSpace - 1); + size_t minimumNumElementsInSlot = 1 + freeList.minimum_size / sizeof(Element); + if (size <= slotSize) + { + // we can us slot for memory; + size_t numElementsToBeUsed = (size + sizeof(Element) - 1) / sizeof(Element); + if ((numElementsToBeUsed + minimumNumElementsInSlot) < slotSpace) + { + // enough space in slot to split, so adjust + size_t newSlotPosition = freePosition + 1 + numElementsToBeUsed; + slot.next = static_cast(newSlotPosition - freePosition); + + // set up the new slot as a free slot + auto& newSlot = memory[newSlotPosition] = Element(slot.next, static_cast(nextPosition - newSlotPosition), 1); + memory[newSlotPosition+1] = previousFreePosition; + memory[newSlotPosition+2] = nextFreePosition; + + if (previousFreePosition != 0) + { + // need to update the previous slot in the free list + memory[previousFreePosition+2].index = newSlotPosition; // set previous free slots next index to the newly created slot + } + + if (nextFreePosition != 0) + { + // need to update the previous slot in the free list + memory[nextFreePosition+1].index = newSlotPosition; // set next free slots previous index to the newly created slot + } + + if (nextPosition < capacity) + { + auto& nextSlot = memory[nextPosition]; + nextSlot.previous = newSlot.next; + } + + if (freePosition == freeList.head) + { + // slot was at head of freeList so move it to the new slot position + freeList.head = newSlotPosition; + } + //std::cout<<"split slot "<(static_cast(ptr) - memory) - 1; + auto& slot = memory[C]; + + // vsg::debug("IntrusiveAllocator::MemoryBlock::deallocate((", ptr, ", ", size, ") C = ", C, ", slot = { ", static_cast(slot.previous), " , ", static_cast(slot.next), ", ", static_cast(slot.status), "}"); + + if (slot.status != 0) + { + throw "Attempt to deallocatoe already available slot"; + } + + // make slot as available + slot.status = 1; + + // set up the indices for the previous and next slots + size_t P = (slot.previous > 0) ? 
(C - static_cast(slot.previous)) : 0; + size_t N = C + static_cast(slot.next); + if (N >= capacity) N = 0; + + // set up the indices for the previous free entry + size_t PNF = 0; + if (P != 0) + { + if (memory[P].status != 0) + { + PNF = memory[P+2].index; + } + } + + // set up the indices for the next free entries + size_t NN = 0; + size_t NPF = 0; + size_t NNF = 0; + if (N != 0) + { + NN = N + static_cast(memory[N].next); + if (NN >= capacity) NN = 0; + + if (memory[N].status != 0) + { + NPF = memory[N + 1].index; + NNF = memory[N + 2].index; + } + } + + // 3 way merge of P, C and C + auto mergePCN = [&]() -> void + { + // vsg::debug(" mergePCN(), P = ", P, ", C = ", C, ", N = ", N); + + // update slots for the merge + memory[P].next += memory[C].next + memory[N].next; + if (NN != 0) memory[NN].previous = memory[P].next; + + // update freeList linked list entries + if (PNF == N) // also implies NPF == P + { + // case 1. in order sequential + // vsg::debug(" case 1. in order sequential"); + + memory[P + 2].index = NNF; + if (NNF != 0) memory[NNF + 1].index = P; + } + else if (PNF == N) // also implies NNF == P + { + // case 2. reverse sequential + // vsg::debug(" case 2. reverse sequential"); + + memory[P + 2].index = NNF; + if (NPF != 0) memory[NPF + 1] = P; + } + else // P and N aren't directly connected within the freeList + { + // case 3. out of order + // vsg::debug(" case 3. out of order"); + if (NPF != 0) memory[NPF + 2].index = NNF; + if (NNF != 0) memory[NNF + 1].index = NPF; + } + + // if N was the head then change head to P + if (freeList.head == N) freeList.head = P; + + // N slot is nolonger a seperate free slot so decrement free count + --freeList.count; + }; + + // 2 way merge of P and C + auto mergePC = [&]() -> void + { + // vsg::debug(" mergePC(), P = ", P, ", C = ", C, ", N = ", N); + + // update slots for the merge + memory[P].next += memory[C].next; + if (N != 0) memory[N].previous = memory[P].next; + + // freeList linked list entries will not need updating. + }; + + // 2 way merge of C and N + auto mergeCN = [&]() -> void + { + // vsg::debug(" mergeCN(), P = ", P, ", C = ", C, ", N = ", N, ", NN = ", NN, ", NPF =", NPF, ", NNF = ", NNF); + + // update slots for merge + memory[C].next += memory[N].next; + if (NN != 0) memory[NN].previous = memory[C].next; + + // update freeList linked list entries + if (NPF != 0) memory[NPF + 2].index = C; + if (NNF != 0) memory[NNF + 1].index = C; + memory[C + 1].index = NPF; + memory[C + 2].index = NNF; + + // if N was the head then change head to C + if (freeList.head == N) freeList.head = C; + }; + + // standalone insertion of C into head of freeList + auto standalone = [&]() -> void + { + // vsg::debug(" standalone(), P = ", P, ", C = ", C, ", N = ", N); + memory[C + 1].index = 0; + memory[C + 2].index = freeList.head; + + if (freeList.head != 0) + { + memory[freeList.head + 1] = C; // set previous heads previousFree to C. + } + + // set the head to C. 
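            // (illustrative note, not part of the patch: freed slots are pushed onto the
            // head of the free list, so allocate() reuses the most recently freed slot first)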
+ freeList.head = C; + + // Inserted new free slot so increment free count + ++freeList.count; + }; + + if (P != 0 && memory[P].status != 0) + { + if (N != 0 && memory[N].status != 0) + { + if ((static_cast(memory[P].next) + static_cast(memory[C].next) + static_cast(memory[N].next)) <= maxSize) mergePCN(); + else if ((static_cast(memory[P].next) + static_cast(memory[C].next)) <= maxSize) mergePC(); // merge P and C + else if ((static_cast(memory[C].next) + static_cast(memory[N].next)) <= maxSize) mergeCN(); // merge C and N + else standalone(); // C is standalone + } + else if ((static_cast(memory[P].next) + static_cast(memory[C].next)) <= maxSize) mergePC(); // merge P and C + else standalone(); // C is standalone + } + else if (N != 0 && memory[N].status != 0) + { + if (static_cast(memory[C].next) + static_cast(memory[N].next) <= maxSize) mergeCN(); // merge C and N + else standalone(); // standalone + } + else + { + // C is standalone + standalone(); + } + + return true; + } + + // vsg::debug("IntrusiveAllocator::MemoryBlock::deallocate((", ptr, ", ", size, ") OUTWITH block : ", this); + + return false; +} + +void IntrusiveAllocator::MemoryBlock::report(std::ostream& out) const +{ + out << "MemoryBlock "< capacity || slot.next > capacity) + { + vsg::warn("slot.corrupted invalid position = ", position, ", slot = {", static_cast(slot.previous), ", ", static_cast(slot.next), ", ", static_cast(slot.status), "}"); + return false; + } + + if (slot.previous != 0) + { + if (slot.previous > position) + { + vsg::warn("slot.previous invalid position = ", position, ", slot = {", static_cast(slot.previous), ", ", static_cast(slot.next), ", ", static_cast(slot.status), "}"); + return false; + } + size_t previous_position = position - slot.previous; + if (previous_position != previous) + { + vsg::warn("previous slot = ", previous, " doesn't match slot.previous, position = ", position, ", slot = {", static_cast(slot.previous), ", ", static_cast(slot.next), ", ", static_cast(slot.status), "}"); + return false; + } + } + + previous = position; + position += slot.next; + if (slot.next == 0) break; + } + + // std::cout<<"No invalid entries found"<allocate(size); + if (ptr) return ptr; + } + + size_t new_blockSize = std::max(size, blockSize); + for (auto& block : memoryBlocks) + { + if (block != memoryBlockWithSpace) + { + auto ptr = block->allocate(size); + if (ptr) return ptr; + } + } + + auto new_block = std::make_shared(name, new_blockSize, alignment); + + if (parent) + { + parent->memoryBlocks[new_block->memory] = new_block; + } + + memoryBlockWithSpace = new_block; + memoryBlocks.push_back(new_block); + + auto ptr = new_block->allocate(size); + + return ptr; +} + +void IntrusiveAllocator::MemoryBlocks::report(std::ostream& out) const +{ + out<<"IntrusiveAllocator::MemoryBlocks::report() memoryBlocks.size() = "<report(out); + } +} + +bool IntrusiveAllocator::MemoryBlocks::validate() const +{ + bool valid = true; + for(auto& memoryBlock : memoryBlocks) + { + valid = memoryBlock->validate() && valid ; + } + return valid; +} + +////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// +// IntrusiveAllocator +// +IntrusiveAllocator::IntrusiveAllocator(std::unique_ptr in_nestedAllocator) : + Allocator(std::move(in_nestedAllocator)) +{ + default_alignment = 4; + + size_t Megabyte = size_t(1024) * size_t(1024); +#if 0 + size_t new_blockSize = size_t(1024) * Megabyte; +#else + size_t new_blockSize = size_t(1) * Megabyte; +#endif + + 
allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", new_blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", new_blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", new_blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", new_blockSize, 16)); + + //// vsg::debug("IntrusiveAllocator()", this); +} + +IntrusiveAllocator::~IntrusiveAllocator() +{ + //// vsg::debug("~IntrusiveAllocator() ", this); +} + +void IntrusiveAllocator::setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) +{ + std::scoped_lock lock(mutex); + + if (size_t(allocatorAffinity) < allocatorMemoryBlocks.size()) + { + allocatorMemoryBlocks[allocatorAffinity]->blockSize = blockSize; + } + else + { + auto name = vsg::make_string("MemoryBlocks_", allocatorAffinity); + + allocatorMemoryBlocks.resize(allocatorAffinity + 1); + allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, name, blockSize, default_alignment)); + } +} + +void IntrusiveAllocator::report(std::ostream& out) const +{ + out << "IntrusiveAllocator::report() " << allocatorMemoryBlocks.size() << std::endl; + + for (const auto& memoryBlock : allocatorMemoryBlocks) + { + if (memoryBlock) memoryBlock->report(out); + } + + validate(); +} + +void* IntrusiveAllocator::allocate(std::size_t size, AllocatorAffinity allocatorAffinity) +{ + std::scoped_lock lock(mutex); + + // create a MemoryBlocks entry if one doesn't already exist + if (allocatorAffinity > allocatorMemoryBlocks.size()) + { + size_t blockSize = 1024 * 1024; // Megabyte + allocatorMemoryBlocks.resize(allocatorAffinity + 1); + allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, "MemoryBlockAffinity", blockSize, default_alignment)); + } + + auto& memoryBlock = allocatorMemoryBlocks[allocatorAffinity]; + if (memoryBlock) + { + auto mem_ptr = memoryBlock->allocate(size); + if (mem_ptr) + { + // vsg::debug("1 IntrusiveAllocator::allocate(", size, ", ", allocatorAffinity, ") ptr = ", mem_ptr); + return mem_ptr; + } + } + + // vsg::debug("2 Fall through IntrusiveAllocator::allocate(", size, ", ", allocatorAffinity, ")"); + + return operator new (size); //, std::align_val_t{default_alignment}); +} + +bool IntrusiveAllocator::deallocate(void* ptr, std::size_t size) +{ + std::scoped_lock lock(mutex); + +#if 0 + + for (auto& memoryBlocks : allocatorMemoryBlocks) + { + if (memoryBlocks && memoryBlocks->deallocate(ptr, size)) + { + return true; + } + } +#else + if (memoryBlocks.empty()) return false; + + auto itr = memoryBlocks.upper_bound(ptr); + if (itr != memoryBlocks.end()) + { + if (itr != memoryBlocks.begin()) + { + --itr; + auto& block = itr->second; + if (block->deallocate(ptr, size)) + { + // vsg::debug("A Allocator::deallocate(", ptr, ", ", size, ") memory = ", itr->first); + return true; + } + else + { + // vsg::debug("B failed Allocator::deallocate(", ptr, ", ", size, ") memory = ", itr->first); + } + } + else + { + auto& block = itr->second; + if (block->deallocate(ptr, size)) + { + // vsg::debug("C Allocator::deallocate(", ptr, ", ", size, ") memory = ", itr->first); + return true; + } + else + { + // vsg::debug("D failed Allocator::deallocate(", ptr, ", ", size, ") 
memory = ", itr->first); + } + } + } + else + { + auto& block = memoryBlocks.rbegin()->second; + if (block->deallocate(ptr, size)) + { + // vsg::debug("E Allocator::deallocate(", ptr, ", ", size, ") memoryBlocks.rbegin()->first = ", memoryBlocks.rbegin()->first); + return true; + } + else + { + // vsg::debug("F failed Allocator::deallocate(", ptr, ", ", size, ") memoryBlocks.rbegin()->first = ", memoryBlocks.rbegin()->first); + } + } + + +#endif + + + if (nestedAllocator && nestedAllocator->deallocate(ptr, size)) + { + // vsg::debug("G Fall through nestedAllocator->deallocate(", ptr, ", ", size, ")"); + return true; + } + + // vsg::debug("H Fall through"); + + operator delete (ptr); + return true; +} + +bool IntrusiveAllocator::validate() const +{ + bool valid = true; + for(auto& memoryBlock : allocatorMemoryBlocks) + { + valid = memoryBlock->validate() && valid ; + } + return valid; +} + + +size_t IntrusiveAllocator::deleteEmptyMemoryBlocks() { vsg::info("IntrusiveAllocator::deleteEmptyMemoryBlocks(..) TODO"); return 0; } +size_t IntrusiveAllocator::totalAvailableSize() const { vsg::info("IntrusiveAllocator::totalAvailableSize(..) TODO"); return 0; } +size_t IntrusiveAllocator::totalReservedSize() const { vsg::info("IntrusiveAllocator::totalReservedSize(..) TODO"); return 0; } +size_t IntrusiveAllocator::totalMemorySize() const { vsg::info("IntrusiveAllocator::totalMemorySize(..) TODO"); return 0; } +void IntrusiveAllocator::setMemoryTracking(int) { vsg::info("IntrusiveAllocator::setMemoryTracking(..) TODO"); } From 28c3c17f60f3ad529dde23f5204f8fc0de26b693 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Thu, 13 Jun 2024 09:35:19 +0100 Subject: [PATCH 06/43] Added validation checks to help with debugging --- src/vsg/core/Allocator.cpp | 79 +++++++++++++++++++++++++++++++++----- 1 file changed, 69 insertions(+), 10 deletions(-) diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index f1ccb7944..ad57df771 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -17,12 +17,20 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI #include #include +#include using namespace vsg; +vsg::Allocator* createAllocator(const char* env) +{ + const char* result = getenv(env); + if (result && strcmp(result, "NEW")==0) return new IntrusiveAllocator(); + else return new OriginalBlockAllocator(); +} + std::unique_ptr& Allocator::instance() { - static std::unique_ptr s_allocator(new OriginalBlockAllocator()); + static std::unique_ptr s_allocator(createAllocator("VSG_ALLOCATOR")); return s_allocator; } @@ -567,6 +575,8 @@ bool IntrusiveAllocator::MemoryBlock::freeSlotsAvaible(size_t size) const void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) { + if (!validate()) std::cout<<"ERROR detected before IntrusiveAllocator::MemoryBlock::allocate("<(slot.next); + if (slot.next==0) + { + std::cerr<<"Warn: IntrusiveAllocator::MemoryBlock::allocate("<(slot.previous)<<", "<(slot.next)<<", "<(slot.status)<<" }"<(slot.previous), " , ", static_cast(slot.next), ", ", static_cast(slot.status), "}"); + if (slot.next==0) + { + std::cerr<<"Warn: IntrusiveAllocator::MemoryBlock::deallocate("< capacity || slot.next > capacity) { - vsg::warn("slot.corrupted invalid position = ", position, ", slot = {", static_cast(slot.previous), ", ", static_cast(slot.next), ", ", static_cast(slot.status), "}"); + std::cerr<<"IntrusiveAllocator::MemoryBlock::validate() "< position) { - vsg::warn("slot.previous invalid position = ", position, ", slot = {", 
static_cast(slot.previous), ", ", static_cast(slot.next), ", ", static_cast(slot.status), "}"); + std::cerr<<"IntrusiveAllocator::MemoryBlock::validate() "<(slot.previous), ", ", static_cast(slot.next), ", ", static_cast(slot.status), "}"); + std::cerr<<"IntrusiveAllocator::MemoryBlock::validate() "<(slot.previous)<<", "<(slot.next)<<", "<(slot.status)<<"}"< in_nestedAlloc allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", new_blockSize, default_alignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", new_blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * new_blockSize, default_alignment)); allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", new_blockSize, default_alignment)); allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", new_blockSize, 16)); From ae98ad902e4d7b64c9726caea07f965cf12a1b6b Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Fri, 14 Jun 2024 13:15:48 +0100 Subject: [PATCH 07/43] Added extra validation checks and fixed freelist bugs --- include/vsg/core/Allocator.h | 26 ++++++ src/vsg/core/Allocator.cpp | 154 +++++++++++++++++++++++++++++------ 2 files changed, 153 insertions(+), 27 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index 970a00f62..bcf88c2e4 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -268,6 +268,9 @@ namespace vsg next(in_next), status(in_status) {} + Element() = default; + Element(const Element&) = default; + union { uint32_t index; @@ -307,6 +310,29 @@ namespace vsg inline bool within(void* ptr) const { return memory <= ptr && ptr < memory_end; } + struct SlotTester + { + SlotTester(Element* in_mem, size_t in_head) : mem(in_mem), head(in_head) {}; + + const Element* mem = nullptr; + size_t head = 0; + + struct Entry + { + std::string name; + size_t position; + Element slot; + size_t previousFree; + size_t nextFree; + }; + + std::list elements; + + void slot(size_t position, const std::string& name); + + void report(std::ostream& out); + }; + }; class MemoryBlocks diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index ad57df771..c78232e40 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -602,11 +602,11 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) size_t nextPosition = freePosition + slotSpace; size_t slotSize = sizeof(Element) * (slotSpace - 1); - size_t minimumNumElementsInSlot = 1 + freeList.minimum_size / sizeof(Element); if (size <= slotSize) { // we can us slot for memory; - size_t numElementsToBeUsed = (size + sizeof(Element) - 1) / sizeof(Element); + size_t minimumNumElementsInSlot = 1 + freeList.minimum_size / sizeof(Element); + size_t numElementsToBeUsed = std::max((size + sizeof(Element) - 1) / sizeof(Element), minimumNumElementsInSlot); if ((numElementsToBeUsed + minimumNumElementsInSlot) < slotSpace) { // enough space in slot to split, so adjust @@ -680,7 +680,7 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) // vsg::debug("IntrusiveAllocator::MemoryBlock::allocate(", size, ") slot used = ", freePosition, ", ", &memory[freePosition+1]); - if (validate()) 
std::cout<<"IntrusiveAllocator::MemoryBlock::allocate("<(slot.status)<(entry.slot.status)<<" } "; + if (entry.slot.status != 0) out<<" previous free = "<(static_cast(ptr) - memory) - 1; auto& slot = memory[C]; + if (validate()) + { + std::cout<<"IntrusiveAllocator::MemoryBlock::deallocate("<(slot.status)<(slot.previous), " , ", static_cast(slot.next), ", ", static_cast(slot.status), "}"); if (slot.next==0) @@ -738,20 +756,19 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t size) throw "Attempt to deallocate already available slot"; } - // make slot as available - slot.status = 1; - // set up the indices for the previous and next slots size_t P = (slot.previous > 0) ? (C - static_cast(slot.previous)) : 0; size_t N = C + static_cast(slot.next); if (N >= capacity) N = 0; // set up the indices for the previous free entry + size_t PPF = 0; size_t PNF = 0; if (P != 0) { if (memory[P].status != 0) { + PPF = memory[P+1].index; PNF = memory[P+2].index; } } @@ -777,44 +794,75 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t size) { // vsg::debug(" mergePCN(), P = ", P, ", C = ", C, ", N = ", N); + SlotTester before(memory, freeList.head); + before.slot(P, "P"); + before.slot(C, "C"); + before.slot(N, "N"); + before.slot(PPF, "PPF"); + before.slot(PNF, "PNF"); + before.slot(NPF, "NPF"); + before.slot(NNF, "NNF"); + // update slots for the merge memory[P].next += memory[C].next + memory[N].next; if (NN != 0) memory[NN].previous = memory[P].next; + // update freeList linked list entries if (PNF == N) // also implies NPF == P { // case 1. in order sequential - // vsg::debug(" case 1. in order sequential"); + std::cout<<" case 1. in order sequential"<(slot.previous)<<", "<(slot.next)<<", "<(slot.status)<<"}"<(slot.status)<<"}"< inFreeList; + // std::cout<<"No invalid entries found"<(slot.status)<<"}"<(slot.status)<<"} previousFree = "<(slot.status)<(slot.status)<(slot.previous), " , ", static_cast(slot.next), ", ", static_cast(slot.status), "}"); +#endif if (slot.next==0) { @@ -792,8 +796,7 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t size) // 3 way merge of P, C and C auto mergePCN = [&]() -> void { - // vsg::debug(" mergePCN(), P = ", P, ", C = ", C, ", N = ", N); - +#if DEBUG_ALLOCATOR SlotTester before(memory, freeList.head); before.slot(P, "P"); before.slot(C, "C"); @@ -802,7 +805,7 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t size) before.slot(PNF, "PNF"); before.slot(NPF, "NPF"); before.slot(NNF, "NNF"); - +#endif // update slots for the merge memory[P].next += memory[C].next + memory[N].next; if (NN != 0) memory[NN].previous = memory[P].next; @@ -812,16 +815,18 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t size) if (PNF == N) // also implies NPF == P { // case 1. in order sequential +#if DEBUG_ALLOCATOR std::cout<<" case 1. 
in order sequential"< Date: Mon, 17 Jun 2024 12:58:41 +0100 Subject: [PATCH 09/43] Bumped version number for vsg::Allocator rewrite --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index d07a7a7e8..dbed0c908 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,7 +1,7 @@ cmake_minimum_required(VERSION 3.7) project(vsg - VERSION 1.1.5 + VERSION 1.1.6 DESCRIPTION "VulkanSceneGraph library" LANGUAGES CXX ) From c7df286a7e21a26c0beb0ff203f0dc7018c09e67 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Mon, 17 Jun 2024 19:00:20 +0100 Subject: [PATCH 10/43] Cleaned up naming --- include/vsg/core/Allocator.h | 17 ++++--- src/vsg/core/Allocator.cpp | 98 +++++++++++++++++++----------------- 2 files changed, 62 insertions(+), 53 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index bcf88c2e4..74163cc7f 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -286,21 +286,22 @@ namespace vsg struct FreeList { - size_t minimum_size = 0; - size_t maximum_size = 0; + size_t minimumSize = 0; + size_t maximumSize = 0; size_t count = 0; size_t head = 0; }; using Offset = uint16_t; - using value_type = Element; - value_type* memory = nullptr; - value_type* memory_end = nullptr; + Element* memory = nullptr; + Element* memoryEnd = nullptr; size_t capacity = 0; - size_t alignment = 4; // min aligment is 4. - size_t block_alignment = 16; + size_t alignment = 4; // min aligment is 4 { sizeof(Element) } + size_t elementAlignment = 1; + size_t blockAlignment = 16; size_t blockSize = 0; + size_t maximumSize = 0; std::vector freeLists; @@ -308,7 +309,7 @@ namespace vsg bool freeSlotsAvaible(size_t size) const; - inline bool within(void* ptr) const { return memory <= ptr && ptr < memory_end; } + inline bool within(void* ptr) const { return memory <= ptr && ptr < memoryEnd; } struct SlotTester { diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index 8db862480..e91918a64 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -516,31 +516,32 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t alignment(in_alignment), blockSize(in_blockSize) { - // // vsg::debug("IntrusiveAllocator::MemoryBlock::MemoryBlock(", in_blockSize, ", ", in_alignment, ")"); + alignment = std::max(alignment, sizeof(Element)); // we need to be a multiple of sizeof(value_type) + elementAlignment = alignment / sizeof(Element); - alignment = std::max(alignment, sizeof(value_type)); // we need to be a multiple of sizeof(value_type) - block_alignment = std::max(alignment, alignof(std::max_align_t)); - block_alignment = std::max(block_alignment, size_t{16}); + blockAlignment = std::max(alignment, alignof(std::max_align_t)); + blockAlignment = std::max(blockAlignment, size_t{16}); // round blockSize up to nearest aligned size blockSize = ((blockSize+alignment-1) / alignment) * alignment; - //memory = static_cast(operator new (blockSize, std::align_val_t{block_alignment})); - memory = static_cast(operator new (blockSize)); - memory_end = memory + blockSize / sizeof(value_type); + memory = static_cast(operator new (blockSize, std::align_val_t{blockAlignment})); + memoryEnd = memory + blockSize / sizeof(Element); capacity = blockSize / alignment; - size_t max_slot_size = (1 << 15) - 1; + size_t num_elements = blockSize / sizeof(Element); + size_t max_slot_size = (1 << 15); // // vsg::debug(" capacity = ", capacity, ", max_slot_size = ", max_slot_size); // 
set up the free tracking to encompass the whole buffer freeLists.emplace_back(); FreeList& freeList = freeLists.front(); - freeList.minimum_size = 2 * sizeof(Element); - freeList.maximum_size = (max_slot_size - 1) * sizeof(Element); - freeList.head = 1; // start at position 1 so that position 0 can be used to mark beginning or end of free lists + freeList.minimumSize = 2 * sizeof(Element); freeList.count = 0; + freeList.head = ((1 + elementAlignment)/elementAlignment) * elementAlignment - 1; // start at position 1 so that position 0 can be used to mark beginning or end of free lists + maximumSize = freeList.maximumSize = (num_elements - freeList.head) * sizeof(Element); + // mark the first element as 0. memory[0].index = 0; @@ -549,7 +550,8 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t size_t position = freeList.head; for(; position < capacity;) { - size_t next_position = std::min(position + max_slot_size, capacity); + size_t aligned_start = ((position + max_slot_size) / elementAlignment) * elementAlignment; + size_t next_position = std::min(aligned_start-1, capacity); memory[position] = Element{ (previous_position == 0) ? 0 : (position - previous_position), next_position - position, 1 }; memory[position+1].index = previous_position; @@ -558,6 +560,19 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t position = next_position; ++freeList.count; } + +#if DEBUG_ALLOCATOR + std::cout<<"IntrusiveAllocator::MemoryBlock::MemoryBlock("< freeList.maximum_size) continue; + // check if freeList has available slots and maximumSize is big enough + if (freeList.count == 0 || size > freeList.maximumSize) continue; size_t freePosition = freeList.head; while (freePosition != 0) @@ -609,14 +626,24 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) if (size <= slotSize) { // we can us slot for memory; - size_t minimumNumElementsInSlot = 1 + freeList.minimum_size / sizeof(Element); + size_t numElementsToBeUsed = std::max((size + sizeof(Element) - 1) / sizeof(Element), minimumNumElementsInSlot); - if ((numElementsToBeUsed + minimumNumElementsInSlot) < slotSpace) + + size_t nextAlignedStart = ((freePosition + 1 + numElementsToBeUsed + elementAlignment) / elementAlignment) * elementAlignment; + size_t minimumAlignedEnd = nextAlignedStart + minimumNumElementsInSlot; +#if DEBUG_ALLOCATOR + std::cout<<"allocating, size = "<(static_cast(ptr) - memory) - 1; auto& slot = memory[C]; @@ -878,8 +897,6 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/ // 2 way merge of P and C auto mergePC = [&]() -> void { - // vsg::debug(" mergePC(), P = ", P, ", C = ", C, ", N = ", N); - // update slots for the merge memory[P].next += memory[C].next; if (N != 0) memory[N].previous = memory[P].next; @@ -897,8 +914,6 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/ // 2 way merge of C and N auto mergeCN = [&]() -> void { - // vsg::debug(" mergeCN(), P = ", P, ", C = ", C, ", N = ", N, ", NN = ", NN, ", NPF =", NPF, ", NNF = ", NNF); - // update slots for merge memory[C].status = 1; memory[C].next += memory[N].next; @@ -924,7 +939,6 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/ // standalone insertion of C into head of freeList auto standalone = [&]() -> void { - // vsg::debug(" standalone(), P = ", P, ", C = ", C, ", N = ", N); memory[C].status = 1; memory[C + 1].index = 0; memory[C + 2].index = freeList.head; @@ -985,7 +999,7 @@ void 
IntrusiveAllocator::MemoryBlock::report(std::ostream& out) const { out << "MemoryBlock "< in_nestedAlloc default_alignment = 4; size_t Megabyte = size_t(1024) * size_t(1024); -#if 0 - size_t new_blockSize = size_t(1024) * Megabyte; -#else - size_t new_blockSize = size_t(1) * Megabyte; -#endif + size_t blockSize = size_t(1) * Megabyte; allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", new_blockSize, default_alignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * new_blockSize, default_alignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", new_blockSize, default_alignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", new_blockSize, 16)); - - //// vsg::debug("IntrusiveAllocator()", this); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", blockSize, 16)); } IntrusiveAllocator::~IntrusiveAllocator() From b0b3294f561d594a1e034b7bdd3d1b35905d0138 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Mon, 17 Jun 2024 20:01:31 +0100 Subject: [PATCH 11/43] Added maximumAllocationSize --- include/vsg/core/Allocator.h | 7 ++++--- src/vsg/core/Allocator.cpp | 17 +++++++++-------- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index 74163cc7f..e95367789 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -287,7 +287,7 @@ namespace vsg struct FreeList { size_t minimumSize = 0; - size_t maximumSize = 0; + size_t maximumAllocationSize = 0; size_t count = 0; size_t head = 0; }; @@ -301,7 +301,7 @@ namespace vsg size_t elementAlignment = 1; size_t blockAlignment = 16; size_t blockSize = 0; - size_t maximumSize = 0; + size_t maximumAllocationSize = 0; std::vector freeLists; @@ -345,7 +345,8 @@ namespace vsg IntrusiveAllocator* parent = nullptr; std::string name; size_t alignment = 4; - size_t blockSize; + size_t blockSize = 0; + size_t maximumAllocationSize = 0; std::vector> memoryBlocks; std::shared_ptr memoryBlockWithSpace; diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index e91918a64..d30db977a 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -540,8 +540,7 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t freeList.minimumSize = 2 * sizeof(Element); freeList.count = 0; freeList.head = ((1 + elementAlignment)/elementAlignment) * elementAlignment - 1; // start at position 1 so that position 0 can be used to mark beginning or end of free lists - maximumSize = freeList.maximumSize = (num_elements - freeList.head) * sizeof(Element); - + maximumAllocationSize = freeList.maximumAllocationSize = std::min(num_elements - freeList.head, (max_slot_size-1)) * sizeof(Element); // mark the 
first element as 0. memory[0].index = 0; @@ -585,7 +584,7 @@ bool IntrusiveAllocator::MemoryBlock::freeSlotsAvaible(size_t size) const { for(auto& freeList : freeLists) { - if (freeList.maximumSize >= size && freeList.count > 0) return true; + if (freeList.maximumAllocationSize >= size && freeList.count > 0) return true; } return false; } @@ -600,8 +599,8 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) for(auto& freeList : freeLists) { - // check if freeList has available slots and maximumSize is big enough - if (freeList.count == 0 || size > freeList.maximumSize) continue; + // check if freeList has available slots and maximumAllocationSize is big enough + if (freeList.count == 0 || size > freeList.maximumAllocationSize) continue; size_t freePosition = freeList.head; while (freePosition != 0) @@ -743,7 +742,7 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/ if (within(ptr)) { auto& freeList = freeLists.front(); - size_t maxSize = 1 + freeList.maximumSize / sizeof(Element); + size_t maxSize = 1 + freeList.maximumAllocationSize / sizeof(Element); // // sequential slots around the slot to be deallocated are named: @@ -1001,6 +1000,7 @@ void IntrusiveAllocator::MemoryBlock::report(std::ostream& out) const out << " alignment = "<(name, new_blockSize, alignment); - if (parent) { parent->memoryBlocks[new_block->memory] = new_block; } + maximumAllocationSize = new_block->maximumAllocationSize; memoryBlockWithSpace = new_block; memoryBlocks.push_back(new_block); From 8a10e6fa9c6eda5cfe0ad682808d3fc1d295e7d6 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Tue, 18 Jun 2024 11:25:19 +0100 Subject: [PATCH 12/43] Restructured how the maximumAllocationSize is computed and managed --- include/vsg/core/Allocator.h | 6 ++++-- src/vsg/core/Allocator.cpp | 24 ++++++++++++++---------- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index e95367789..faf1cc5c3 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -286,8 +286,6 @@ namespace vsg struct FreeList { - size_t minimumSize = 0; - size_t maximumAllocationSize = 0; size_t count = 0; size_t head = 0; }; @@ -334,6 +332,10 @@ namespace vsg void report(std::ostream& out); }; + static inline size_t computeMaxiumAllocationSize(size_t blockSize, size_t alignment) + { + return std::min(blockSize - alignment, size_t((1<<15)-1) * sizeof(Element)); + } }; class MemoryBlocks diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index d30db977a..7802aade0 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -529,7 +529,6 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t memoryEnd = memory + blockSize / sizeof(Element); capacity = blockSize / alignment; - size_t num_elements = blockSize / sizeof(Element); size_t max_slot_size = (1 << 15); // // vsg::debug(" capacity = ", capacity, ", max_slot_size = ", max_slot_size); @@ -537,10 +536,9 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t // set up the free tracking to encompass the whole buffer freeLists.emplace_back(); FreeList& freeList = freeLists.front(); - freeList.minimumSize = 2 * sizeof(Element); freeList.count = 0; freeList.head = ((1 + elementAlignment)/elementAlignment) * elementAlignment - 1; // start at position 1 so that position 0 can be used to mark beginning or end of free lists - maximumAllocationSize = freeList.maximumAllocationSize = 
std::min(num_elements - freeList.head, (max_slot_size-1)) * sizeof(Element); + maximumAllocationSize = computeMaxiumAllocationSize(blockSize, alignment); // mark the first element as 0. memory[0].index = 0; @@ -582,9 +580,11 @@ IntrusiveAllocator::MemoryBlock::~MemoryBlock() bool IntrusiveAllocator::MemoryBlock::freeSlotsAvaible(size_t size) const { + if (size > maximumAllocationSize) return false; + for(auto& freeList : freeLists) { - if (freeList.maximumAllocationSize >= size && freeList.count > 0) return true; + if (freeList.count > 0) return true; } return false; } @@ -600,7 +600,7 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) for(auto& freeList : freeLists) { // check if freeList has available slots and maximumAllocationSize is big enough - if (freeList.count == 0 || size > freeList.maximumAllocationSize) continue; + if (freeList.count == 0 || size > maximumAllocationSize) continue; size_t freePosition = freeList.head; while (freePosition != 0) @@ -742,7 +742,7 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/ if (within(ptr)) { auto& freeList = freeLists.front(); - size_t maxSize = 1 + freeList.maximumAllocationSize / sizeof(Element); + size_t maxSize = 1 + maximumAllocationSize / sizeof(Element); // // sequential slots around the slot to be deallocated are named: @@ -1022,7 +1022,7 @@ void IntrusiveAllocator::MemoryBlock::report(std::ostream& out) const out<<" freeList.size() = "<memoryBlocks[new_block->memory] = new_block; } - maximumAllocationSize = new_block->maximumAllocationSize; + if (memoryBlocks.empty()) + { + maximumAllocationSize = new_block->maximumAllocationSize; + } + memoryBlockWithSpace = new_block; memoryBlocks.push_back(new_block); From 9d74d549a11144a1666274ed57bef271d0502e8f Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Tue, 18 Jun 2024 15:37:50 +0100 Subject: [PATCH 13/43] Added handling of large memory allocations --- include/vsg/core/Allocator.h | 3 +- src/vsg/core/Allocator.cpp | 65 ++++++++++++++++++++---------------- 2 files changed, 39 insertions(+), 29 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index faf1cc5c3..80430d735 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -334,7 +334,7 @@ namespace vsg static inline size_t computeMaxiumAllocationSize(size_t blockSize, size_t alignment) { - return std::min(blockSize - alignment, size_t((1<<15)-1) * sizeof(Element)); + return std::min(blockSize - alignment, size_t((1<<15)-2) * sizeof(Element)); } }; @@ -359,6 +359,7 @@ namespace vsg std::vector> allocatorMemoryBlocks; std::map> memoryBlocks; + std::map largeAllocations; }; diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index 7802aade0..ac1a07d88 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -622,6 +622,7 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) size_t nextPosition = freePosition + slotSpace; size_t slotSize = sizeof(Element) * (slotSpace - 1); + if (size <= slotSize) { // we can us slot for memory; @@ -713,9 +714,12 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) freePosition = nextFreePosition; } - } +#if DEBUG_ALLOCATOR + std::cout<<"IntrusiveAllocator::MemoryBlock::allocator("< in_nestedAllocator) : Allocator(std::move(in_nestedAllocator)) { + std::cout<<"IntrusiveAllocator::IntrusiveAllocator()"< in_nestedAlloc IntrusiveAllocator::~IntrusiveAllocator() { - //// vsg::debug("~IntrusiveAllocator() ", this); + 
std::cout<<"IntrusiveAllocator::~IntrusiveAllocator() largeAllocations.size() = "<allocate(size); - if (mem_ptr) + if (size <= blocks->maximumAllocationSize) { - // vsg::debug("1 IntrusiveAllocator::allocate(", size, ", ", allocatorAffinity, ") ptr = ", mem_ptr); - return mem_ptr; + ptr = blocks->allocate(size); + if (ptr) return ptr; + std::cout<<"IntrusiveAllocator::allocate() Failed to allocator memory from memoryBlocks "<alignment}); + if (ptr) largeAllocations[ptr] = size; + std::cout<<"IntrusiveAllocator::allocate() MemoryBlocks aligned large allocation = "<maximumAllocationSize = "<maximumAllocationSize<deallocate(ptr, size)) { - // vsg::debug("G Fall through nestedAllocator->deallocate(", ptr, ", ", size, ")"); return true; } - // vsg::debug("H Fall through"); - - operator delete (ptr); - return true; + return false; } bool IntrusiveAllocator::validate() const From 2761d2c715ce8102f419586b949f2b5b342bbf24 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Wed, 19 Jun 2024 12:13:33 +0100 Subject: [PATCH 14/43] Added mutex to FindDynamicObjects and PropagateDynamicObjects to avoid multiple threads using the same instances at the same time. --- include/vsg/utils/FindDynamicObjects.h | 2 ++ include/vsg/utils/PropagateDynamicObjects.h | 1 + src/vsg/io/read.cpp | 4 ++++ 3 files changed, 7 insertions(+) diff --git a/include/vsg/utils/FindDynamicObjects.h b/include/vsg/utils/FindDynamicObjects.h index 1fc167451..2e1427256 100644 --- a/include/vsg/utils/FindDynamicObjects.h +++ b/include/vsg/utils/FindDynamicObjects.h @@ -24,6 +24,8 @@ namespace vsg class VSG_DECLSPEC FindDynamicObjects : public Inherit { public: + + std::mutex mutex; std::set dynamicObjects; inline void tag(const Object* object) diff --git a/include/vsg/utils/PropagateDynamicObjects.h b/include/vsg/utils/PropagateDynamicObjects.h index ccfde409a..4c60514f5 100644 --- a/include/vsg/utils/PropagateDynamicObjects.h +++ b/include/vsg/utils/PropagateDynamicObjects.h @@ -26,6 +26,7 @@ namespace vsg public: PropagateDynamicObjects(); + std::mutex mutex; std::set dynamicObjects; std::stack taggedStack; diff --git a/src/vsg/io/read.cpp b/src/vsg/io/read.cpp index dca5a789b..cf578387c 100644 --- a/src/vsg/io/read.cpp +++ b/src/vsg/io/read.cpp @@ -77,9 +77,13 @@ ref_ptr vsg::read(const Path& filename, ref_ptr options) if (load->object && options && options->findDynamicObjects && options->propagateDynamicObjects) { // invoke the find and propogate visitiors to collate all the dynamic objects that will need to be cloned. 
+ + std::scoped_lock fdo_lock(options->findDynamicObjects->mutex); + options->findDynamicObjects->dynamicObjects.clear(); load->object->accept(*(options->findDynamicObjects)); + std::scoped_lock pdo_lock(options->propagateDynamicObjects->mutex); options->propagateDynamicObjects->dynamicObjects.swap(options->findDynamicObjects->dynamicObjects); load->object->accept(*(options->propagateDynamicObjects)); From 648887528af1673d7442fd47f6a473b9ccffbdfa Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Wed, 19 Jun 2024 12:15:54 +0100 Subject: [PATCH 15/43] Commented out/delete debug messages --- src/vsg/core/Allocator.cpp | 26 ++++---------------------- 1 file changed, 4 insertions(+), 22 deletions(-) diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index ac1a07d88..ab44fca65 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -1284,21 +1284,18 @@ void* IntrusiveAllocator::allocate(std::size_t size, AllocatorAffinity allocator { ptr = blocks->allocate(size); if (ptr) return ptr; - std::cout<<"IntrusiveAllocator::allocate() Failed to allocator memory from memoryBlocks "<alignment}); if (ptr) largeAllocations[ptr] = size; - std::cout<<"IntrusiveAllocator::allocate() MemoryBlocks aligned large allocation = "<maximumAllocationSize = "<maximumAllocationSize<maximumAllocationSize = "<maximumAllocationSize<first = ", memoryBlocks.rbegin()->first); return true; } - else - { - // vsg::debug("F failed Allocator::deallocate(", ptr, ", ", size, ") memoryBlocks.rbegin()->first = ", memoryBlocks.rbegin()->first); - } } auto la_itr = largeAllocations.find(ptr); if (la_itr != largeAllocations.end()) { // large allocation; - std::cout<<"IntrusiveAllocator::deallocate("< Date: Wed, 19 Jun 2024 16:14:21 +0100 Subject: [PATCH 16/43] Changed the Offset type definition to address Windows warnings --- include/vsg/core/Allocator.h | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index 80430d735..f7a87d6d4 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -259,18 +259,6 @@ namespace vsg // bitfield packing of doubly-linked with status field into a 4 byte word struct Element { - - Element(size_t in_index) : - index(in_index) {} - - Element(size_t in_previous, size_t in_next, unsigned int in_status) : - previous(in_previous), - next(in_next), - status(in_status) {} - - Element() = default; - Element(const Element&) = default; - union { uint32_t index; @@ -282,6 +270,20 @@ namespace vsg unsigned int status : 2; }; }; + + using Offset = decltype(previous); + + Element(size_t in_index) : + index(static_cast(in_index)) {} + + Element(size_t in_previous, size_t in_next, unsigned int in_status) : + previous(static_cast(in_previous)), + next(static_cast(in_next)), + status(in_status) {} + + Element() = default; + Element(const Element&) = default; + }; struct FreeList @@ -290,7 +292,6 @@ namespace vsg size_t head = 0; }; - using Offset = uint16_t; Element* memory = nullptr; Element* memoryEnd = nullptr; size_t capacity = 0; From 55d489eaef07781248b9e0527b5e9de29c6e018d Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Wed, 19 Jun 2024 16:21:03 +0100 Subject: [PATCH 17/43] Fixed type --- src/vsg/core/Allocator.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index ab44fca65..a6c890225 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -639,13 
+639,13 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) // enough space in slot to split, so adjust size_t newSlotPosition = nextAlignedStart-1; - slot.next = static_cast(newSlotPosition - freePosition); + slot.next = static_cast(newSlotPosition - freePosition); #if DEBUG_ALLOCATOR std::cout<<"splitting slot newSlotPosition = "<(nextPosition - newSlotPosition), 1); + auto& newSlot = memory[newSlotPosition] = Element(slot.next, static_cast(nextPosition - newSlotPosition), 1); memory[newSlotPosition+1] = previousFreePosition; memory[newSlotPosition+2] = nextFreePosition; From d82262efffd86f92ecea727189e5dafd782ac990 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Wed, 19 Jun 2024 16:49:45 +0100 Subject: [PATCH 18/43] Warning fixes --- include/vsg/core/Allocator.h | 2 ++ src/vsg/core/Allocator.cpp | 48 +++++++++++++++++++----------------- 2 files changed, 27 insertions(+), 23 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index f7a87d6d4..5bb7fed17 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -272,6 +272,8 @@ namespace vsg }; using Offset = decltype(previous); + using Status = decltype(status); + using Index = decltype(index); Element(size_t in_index) : index(static_cast(in_index)) {} diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index a6c890225..7cb1c8908 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -551,8 +551,8 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t size_t next_position = std::min(aligned_start-1, capacity); memory[position] = Element{ (previous_position == 0) ? 0 : (position - previous_position), next_position - position, 1 }; - memory[position+1].index = previous_position; - memory[position+2].index = (next_position < capacity) ? next_position : 0; + memory[position+1].index = static_cast(previous_position); + memory[position+2].index = static_cast((next_position < capacity) ? 
next_position : 0); previous_position = position; position = next_position; ++freeList.count; @@ -595,12 +595,15 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) if (!validate()) std::cout<<"ERROR detected before IntrusiveAllocator::MemoryBlock::allocate("< maximumAllocationSize) return nullptr; + const size_t minimumNumElementsInSlot = 3; for(auto& freeList : freeLists) { - // check if freeList has available slots and maximumAllocationSize is big enough - if (freeList.count == 0 || size > maximumAllocationSize) continue; + // check if freeList has available slots + if (freeList.count == 0) continue; size_t freePosition = freeList.head; while (freePosition != 0) @@ -611,8 +614,8 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) throw "Warning: allocated slot found in freeList"; } - size_t previousFreePosition = memory[freePosition+1].index; - size_t nextFreePosition = memory[freePosition+2].index; + Element::Index previousFreePosition = memory[freePosition+1].index; + Element::Index nextFreePosition = memory[freePosition+2].index; size_t slotSpace = static_cast(slot.next); if (slot.next==0) @@ -620,17 +623,16 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) std::cerr<<"Warn: IntrusiveAllocator::MemoryBlock::allocate("<(slot.previous)<<", "<(slot.next)<<", "<(slot.status)<<" }"<(std::max((size + sizeof(Element) - 1) / sizeof(Element), minimumNumElementsInSlot)); + Element::Index nextAlignedStart = ((freePosition + 1 + numElementsToBeUsed + elementAlignment) / elementAlignment) * elementAlignment; + Element::Index minimumAlignedEnd = nextAlignedStart + minimumNumElementsInSlot; #if DEBUG_ALLOCATOR std::cout<<"allocating, size = "<(slot.previous)<<", "<(slot.next)<<", "<(slot.status)<<" }"<(slotSpace); size_t slotSize = sizeof(Element) * (slotSpace - 1); if (size <= slotSize) { // we can us slot for memory; - Element::Index numElementsToBeUsed = static_cast(std::max((size + sizeof(Element) - 1) / sizeof(Element), minimumNumElementsInSlot)); - Element::Index nextAlignedStart = ((freePosition + 1 + numElementsToBeUsed + elementAlignment) / elementAlignment) * elementAlignment; - Element::Index minimumAlignedEnd = nextAlignedStart + minimumNumElementsInSlot; + size_t numElementsToBeUsed = std::max((size + sizeof(Element) - 1) / sizeof(Element), minimumNumElementsInSlot); + Element::Index nextAlignedStart = static_cast(((freePosition + 1 + numElementsToBeUsed + elementAlignment) / elementAlignment) * elementAlignment); + Element::Index minimumAlignedEnd = nextAlignedStart + static_cast(minimumNumElementsInSlot); #if DEBUG_ALLOCATOR std::cout<<"allocating, size = "< #include #include +#include #include #include @@ -23,8 +24,7 @@ using namespace vsg; vsg::Allocator* createAllocator(const char* env) { - const char* result = getenv(env); - if (result && strcmp(result, "NEW")==0) return new IntrusiveAllocator(); + if (vsg::getEnv(env)=="NEW") return new IntrusiveAllocator(); else return new OriginalBlockAllocator(); } From aad4ec721318110d00d372c621ad08fee998803e Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Thu, 20 Jun 2024 11:15:26 +0100 Subject: [PATCH 22/43] Added alignment to delete of memory --- include/vsg/core/Allocator.h | 2 +- src/vsg/core/Allocator.cpp | 9 ++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index a7799993a..e750a515c 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -362,7 +362,7 @@ namespace vsg 
std::vector> allocatorMemoryBlocks; std::map> memoryBlocks; - std::map largeAllocations; + std::map> largeAllocations; }; diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index 4572c489f..6b9a1cb19 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -575,8 +575,7 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t IntrusiveAllocator::MemoryBlock::~MemoryBlock() { - //operator delete (memory, std::align_val_t{block_alignment}); - operator delete (memory); + operator delete(memory, std::align_val_t{blockAlignment}); } bool IntrusiveAllocator::MemoryBlock::freeSlotsAvaible(size_t size) const @@ -1291,13 +1290,13 @@ void* IntrusiveAllocator::allocate(std::size_t size, AllocatorAffinity allocator } ptr = operator new (size, std::align_val_t{blocks->alignment}); - if (ptr) largeAllocations[ptr] = size; + if (ptr) largeAllocations[ptr] = std::pair(blocks->alignment, size); //std::cout<<"IntrusiveAllocator::allocate() MemoryBlocks aligned large allocation = "<maximumAllocationSize = "<maximumAllocationSize<(default_alignment, size); //std::cout<<"IntrusiveAllocator::allocate() default aligned large allocation = "<second.first}); largeAllocations.erase(la_itr); return true; } From c9bded23f6afeded13de9044f67d6e80155c926c Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Thu, 20 Jun 2024 13:14:13 +0100 Subject: [PATCH 23/43] Ran clang-format --- include/vsg/core/Allocator.h | 17 +- include/vsg/utils/FindDynamicObjects.h | 1 - src/vsg/core/Allocator.cpp | 292 +++++++++++++------------ src/vsg/utils/ShaderCompiler.cpp | 2 +- 4 files changed, 166 insertions(+), 146 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index e750a515c..981445389 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -41,8 +41,10 @@ namespace vsg class VSG_DECLSPEC Allocator { public: - explicit Allocator(size_t in_default_alignment = 4) : default_alignment(in_default_alignment) {} - explicit Allocator(std::unique_ptr in_nestedAllocator, size_t in_default_alignment = 4) : default_alignment(in_default_alignment), nestedAllocator(std::move(in_nestedAllocator)) {} + explicit Allocator(size_t in_default_alignment = 4) : + default_alignment(in_default_alignment) {} + explicit Allocator(std::unique_ptr in_nestedAllocator, size_t in_default_alignment = 4) : + default_alignment(in_default_alignment), nestedAllocator(std::move(in_nestedAllocator)) {} virtual ~Allocator() {} /// Allocator singleton @@ -83,7 +85,6 @@ namespace vsg double deallocationTime = 0.0; protected: - // if you are assigning a custom allocator you must retain the old allocator to manage the memory it allocated and needs to delete std::unique_ptr nestedAllocator; }; @@ -216,7 +217,6 @@ namespace vsg double deallocationTime = 0.0; protected: - std::vector> allocatorMemoryBlocks; }; @@ -243,7 +243,6 @@ namespace vsg void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) override; protected: - struct MemoryBlock { MemoryBlock(const std::string& in_name, size_t in_blockSize, size_t in_alignment); @@ -285,7 +284,6 @@ namespace vsg Element() = default; Element(const Element&) = default; - }; struct FreeList @@ -314,7 +312,8 @@ namespace vsg struct SlotTester { - SlotTester(Element* in_mem, size_t in_head) : mem(in_mem), head(in_head) {}; + SlotTester(Element* in_mem, size_t in_head) : + mem(in_mem), head(in_head){}; const Element* mem = nullptr; size_t head = 0; @@ -337,7 +336,7 @@ namespace vsg static inline size_t 
computeMaxiumAllocationSize(size_t blockSize, size_t alignment) { - return std::min(blockSize - alignment, size_t((1<<15)-2) * sizeof(Element)); + return std::min(blockSize - alignment, size_t((1 << 15) - 2) * sizeof(Element)); } }; @@ -365,6 +364,4 @@ namespace vsg std::map> largeAllocations; }; - - } // namespace vsg diff --git a/include/vsg/utils/FindDynamicObjects.h b/include/vsg/utils/FindDynamicObjects.h index 2e1427256..2ac97e0ec 100644 --- a/include/vsg/utils/FindDynamicObjects.h +++ b/include/vsg/utils/FindDynamicObjects.h @@ -24,7 +24,6 @@ namespace vsg class VSG_DECLSPEC FindDynamicObjects : public Inherit { public: - std::mutex mutex; std::set dynamicObjects; diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index 6b9a1cb19..28cd41a31 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -12,9 +12,9 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI #include #include +#include #include #include -#include #include #include @@ -24,8 +24,10 @@ using namespace vsg; vsg::Allocator* createAllocator(const char* env) { - if (vsg::getEnv(env)=="NEW") return new IntrusiveAllocator(); - else return new OriginalBlockAllocator(); + if (vsg::getEnv(env) == "NEW") + return new IntrusiveAllocator(); + else + return new OriginalBlockAllocator(); } std::unique_ptr& Allocator::instance() @@ -59,7 +61,6 @@ OriginalBlockAllocator::~OriginalBlockAllocator() { } - void OriginalBlockAllocator::report(std::ostream& out) const { out << "OriginalBlockAllocator::report() " << allocatorMemoryBlocks.size() << std::endl; @@ -523,7 +524,7 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t blockAlignment = std::max(blockAlignment, size_t{16}); // round blockSize up to nearest aligned size - blockSize = ((blockSize+alignment-1) / alignment) * alignment; + blockSize = ((blockSize + alignment - 1) / alignment) * alignment; memory = static_cast(operator new (blockSize, std::align_val_t{blockAlignment})); memoryEnd = memory + blockSize / sizeof(Element); @@ -538,7 +539,7 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t freeLists.emplace_back(); FreeList& freeList = freeLists.front(); freeList.count = 0; - freeList.head = static_cast(((1 + elementAlignment)/elementAlignment) * elementAlignment - 1); + freeList.head = static_cast(((1 + elementAlignment) / elementAlignment) * elementAlignment - 1); maximumAllocationSize = computeMaxiumAllocationSize(blockSize, alignment); // mark the first element as 0. @@ -546,28 +547,28 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t size_t previous_position = 0; // 0 marks the beginning of the free list size_t position = freeList.head; - for(; position < capacity;) + for (; position < capacity;) { size_t aligned_start = ((position + max_slot_size) / elementAlignment) * elementAlignment; - size_t next_position = std::min(aligned_start-1, capacity); + size_t next_position = std::min(aligned_start - 1, capacity); - memory[position] = Element{ (previous_position == 0) ? 0 : (position - previous_position), next_position - position, 1 }; - memory[position+1].index = static_cast(previous_position); - memory[position+2].index = static_cast((next_position < capacity) ? next_position : 0); + memory[position] = Element{(previous_position == 0) ? 
0 : (position - previous_position), next_position - position, 1}; + memory[position + 1].index = static_cast(previous_position); + memory[position + 2].index = static_cast((next_position < capacity) ? next_position : 0); previous_position = position; position = next_position; ++freeList.count; } #if DEBUG_ALLOCATOR - std::cout<<"IntrusiveAllocator::MemoryBlock::MemoryBlock("<(slot.next); - if (slot.next==0) + if (slot.next == 0) { - std::cerr<<"Warn: IntrusiveAllocator::MemoryBlock::allocate("<(slot.previous)<<", "<(slot.next)<<", "<(slot.status)<<" }"<(slot.previous) << ", " << static_cast(slot.next) << ", " << static_cast(slot.status) << " }" << std::endl; } Element::Index nextPosition = freePosition + static_cast(slotSpace); @@ -634,33 +635,33 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) Element::Index nextAlignedStart = static_cast(((freePosition + 1 + numElementsToBeUsed + elementAlignment) / elementAlignment) * elementAlignment); Element::Index minimumAlignedEnd = nextAlignedStart + static_cast(minimumNumElementsInSlot); #if DEBUG_ALLOCATOR - std::cout<<"allocating, size = "<(slot.status)<(slot.status) << std::endl; + else + std::cout << "ERROR detected after IntrusiveAllocator::MemoryBlock::allocate(" << size << ") " << this << " allocated = " << &memory[freePosition + 1] << std::endl; #endif - return &memory[freePosition+1]; + return &memory[freePosition + 1]; } freePosition = nextFreePosition; @@ -719,7 +721,7 @@ void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) } #if DEBUG_ALLOCATOR - std::cout<<"IntrusiveAllocator::MemoryBlock::allocator("<(entry.slot.status)<<" } "; - if (entry.slot.status != 0) out<<" previous free = "<(entry.slot.status) << " } "; + if (entry.slot.status != 0) + out << " previous free = " << entry.previousFree << ", next free = " << entry.nextFree << std::endl; + else + out << std::endl; } } @@ -764,23 +769,23 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/ #if DEBUG_ALLOCATOR if (validate()) { - std::cout<<"IntrusiveAllocator::MemoryBlock::deallocate("<(slot.status)<(slot.status) << std::endl; } else { - std::cout<<"ERROR detected befpre IntrusiveAllocator::MemoryBlock::deallocate("< void - { + auto mergePCN = [&]() -> void { #if DEBUG_ALLOCATOR SlotTester before(memory, freeList.head); before.slot(P, "P"); @@ -834,13 +838,12 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/ memory[P].next += memory[C].next + memory[N].next; if (NN != 0) memory[NN].previous = memory[P].next; - // update freeList linked list entries if (PNF == N) // also implies NPF == P { // case 1. in order sequential #if DEBUG_ALLOCATOR - std::cout<<" case 1. in order sequential"< void - { + auto mergePC = [&]() -> void { // update slots for the merge memory[P].next += memory[C].next; if (N != 0) memory[N].previous = memory[P].next; - // freeList linked list entries will not need updating. + // freeList linked list entries will not need updating. 
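For readers following the deallocation path being reformatted here: mergePC, mergeCN, mergePCN and standalone are the four possible outcomes when a slot C is returned, depending on whether the previous slot P and/or the next slot N are already free. The sketch below reproduces those four cases on a deliberately simple offset -> size free map instead of the intrusive Element array, purely to make the branch structure visible; it is not the library's data structure.

    #include <cstddef>
    #include <iostream>
    #include <iterator>
    #include <map>

    // freeMap maps offset -> size of each free range.
    void release(std::map<std::size_t, std::size_t>& freeMap, std::size_t offset, std::size_t size)
    {
        auto next = freeMap.lower_bound(offset); // first free range at or after the released slot
        auto prev = (next == freeMap.begin()) ? freeMap.end() : std::prev(next);

        bool mergePrev = prev != freeMap.end() && prev->first + prev->second == offset;
        bool mergeNext = next != freeMap.end() && offset + size == next->first;

        if (mergePrev && mergeNext) // mergePCN: previous + current + next become one range
        {
            prev->second += size + next->second;
            freeMap.erase(next);
        }
        else if (mergePrev) // mergePC: grow the previous free range
        {
            prev->second += size;
        }
        else if (mergeNext) // mergeCN: absorb the following free range
        {
            freeMap[offset] = size + next->second;
            freeMap.erase(next);
        }
        else // standalone: new entry in the free map
        {
            freeMap[offset] = size;
        }
    }

    int main()
    {
        std::map<std::size_t, std::size_t> freeMap;
        release(freeMap, 0, 16);  // standalone
        release(freeMap, 32, 16); // standalone, not adjacent
        release(freeMap, 16, 16); // bridges both neighbours -> one range covering [0, 48)
        for (auto& [offset, size] : freeMap)
            std::cout << "free range at " << offset << " size " << size << "\n";
    }

In the real MemoryBlock the same decisions additionally have to repair the free-list head and the previousFree/nextFree indices stored in the neighbouring Elements.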
#if DEBUG_ALLOCATOR if (!validate()) { - std::cout<<"ERROR detected after mergePC() IntrusiveAllocator::MemoryBlock::deallocate("< void - { + auto mergeCN = [&]() -> void { // update slots for merge memory[C].status = 1; memory[C].next += memory[N].next; @@ -936,21 +938,20 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/ #if DEBUG_ALLOCATOR if (!validate()) { - std::cout<<"ERROR detected after mergeCN() IntrusiveAllocator::MemoryBlock::deallocate("< void - { + auto standalone = [&]() -> void { memory[C].status = 1; memory[C + 1].index = 0; memory[C + 2].index = freeList.head; if (freeList.head != 0) { - memory[freeList.head + 1] = C; // set previous heads previousFree to C. + memory[freeList.head + 1] = C; // set previous heads previousFree to C. } // set the head to C. @@ -962,7 +963,7 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/ #if DEBUG_ALLOCATOR if (!validate()) { - std::cout<<"ERROR detected after standalone() IntrusiveAllocator::MemoryBlock::deallocate("< allocated; std::set available; - while(position < capacity) + while (position < capacity) { auto& slot = memory[position]; if (slot.previous > capacity || slot.next > capacity) { - std::cerr<<"IntrusiveAllocator::MemoryBlock::validate() "< position) { - std::cerr<<"IntrusiveAllocator::MemoryBlock::validate() "<(slot.status)<<"}"<(slot.status) << "}" << std::endl; return false; } @@ -1099,11 +1110,11 @@ bool IntrusiveAllocator::MemoryBlock::validate() const std::set inFreeList; // std::cout<<"No invalid entries found"<(slot.status)<<"}"<(slot.status) << "}" << std::endl; return false; } - if (memory[freePosition+1].index != previousPosition || memory[freePosition+1].index == freePosition) + if (memory[freePosition + 1].index != previousPosition || memory[freePosition + 1].index == freePosition) { - std::cerr<<"IntrusiveAllocator::MemoryBlock::validate() "<(slot.status)<<"} previousFree = "<(slot.status) << "} previousFree = " << memory[freePosition + 1].index << ", nextFree = " << memory[freePosition + 2].index << std::endl; return false; } previousPosition = freePosition; - freePosition = memory[freePosition+2].index; + freePosition = memory[freePosition + 2].index; } } - if (available.size() != inFreeList.size()) { - std::cerr<<"IntrusiveAllocator::MemoryBlock::validate() "< in_nestedAlloc IntrusiveAllocator::~IntrusiveAllocator() { - std::cout<<"IntrusiveAllocator::~IntrusiveAllocator() largeAllocations.size() = "<second.first}); + operator delete (ptr, std::align_val_t{la_itr->second.first}); largeAllocations.erase(la_itr); return true; } @@ -1358,16 +1367,31 @@ bool IntrusiveAllocator::deallocate(void* ptr, std::size_t size) bool IntrusiveAllocator::validate() const { bool valid = true; - for(auto& memoryBlock : allocatorMemoryBlocks) + for (auto& memoryBlock : allocatorMemoryBlocks) { - valid = memoryBlock->validate() && valid ; + valid = memoryBlock->validate() && valid; } return valid; } - -size_t IntrusiveAllocator::deleteEmptyMemoryBlocks() { vsg::info("IntrusiveAllocator::deleteEmptyMemoryBlocks(..) TODO"); return 0; } -size_t IntrusiveAllocator::totalAvailableSize() const { vsg::info("IntrusiveAllocator::totalAvailableSize(..) TODO"); return 0; } -size_t IntrusiveAllocator::totalReservedSize() const { vsg::info("IntrusiveAllocator::totalReservedSize(..) TODO"); return 0; } -size_t IntrusiveAllocator::totalMemorySize() const { vsg::info("IntrusiveAllocator::totalMemorySize(..) 
TODO"); return 0; } +size_t IntrusiveAllocator::deleteEmptyMemoryBlocks() +{ + vsg::info("IntrusiveAllocator::deleteEmptyMemoryBlocks(..) TODO"); + return 0; +} +size_t IntrusiveAllocator::totalAvailableSize() const +{ + vsg::info("IntrusiveAllocator::totalAvailableSize(..) TODO"); + return 0; +} +size_t IntrusiveAllocator::totalReservedSize() const +{ + vsg::info("IntrusiveAllocator::totalReservedSize(..) TODO"); + return 0; +} +size_t IntrusiveAllocator::totalMemorySize() const +{ + vsg::info("IntrusiveAllocator::totalMemorySize(..) TODO"); + return 0; +} void IntrusiveAllocator::setMemoryTracking(int) { vsg::info("IntrusiveAllocator::setMemoryTracking(..) TODO"); } diff --git a/src/vsg/utils/ShaderCompiler.cpp b/src/vsg/utils/ShaderCompiler.cpp index 2ca86f87a..1707057dd 100644 --- a/src/vsg/utils/ShaderCompiler.cpp +++ b/src/vsg/utils/ShaderCompiler.cpp @@ -20,9 +20,9 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI #include #if VSG_SUPPORTS_ShaderCompiler -# include # include # include +# include #endif #include From 009cba4c7d11dd57141e712f9cc80c45f8710a76 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Thu, 20 Jun 2024 14:08:38 +0100 Subject: [PATCH 24/43] Changed the InstrusiveAllocator to be the default. --- src/vsg/core/Allocator.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index 28cd41a31..f667a1799 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -24,10 +24,10 @@ using namespace vsg; vsg::Allocator* createAllocator(const char* env) { - if (vsg::getEnv(env) == "NEW") - return new IntrusiveAllocator(); - else + if (vsg::getEnv(env) == "OLD") return new OriginalBlockAllocator(); + else + return new IntrusiveAllocator(); } std::unique_ptr& Allocator::instance() From 095faaf2fb0ae2b04375c92a661af4ffd0f4770c Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Thu, 20 Jun 2024 14:56:54 +0100 Subject: [PATCH 25/43] Removed OriginalBlockAllocator --- include/vsg/core/Allocator.h | 105 ++------ src/vsg/core/Allocator.cpp | 461 +---------------------------------- 2 files changed, 23 insertions(+), 543 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index 981445389..c44b1cb87 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -137,89 +137,28 @@ namespace vsg template using allocator_affinity_physics = allocator_affinity_adapter; - ///// - - class VSG_DECLSPEC OriginalBlockAllocator : public Allocator - { - public: - explicit OriginalBlockAllocator(size_t in_default_alignment = 4); - explicit OriginalBlockAllocator(std::unique_ptr in_nestedAllocator, size_t in_default_alignment = 4); - - virtual ~OriginalBlockAllocator(); - - /// allocate from the pool of memory blocks, or allocate from a new memory block - void* allocate(std::size_t size, AllocatorAffinity allocatorAffinity = ALLOCATOR_AFFINITY_OBJECTS) override; - - /// deallocate, returning data to pool. 
- bool deallocate(void* ptr, std::size_t size) override; - - /// delete any MemoryBlock that are empty - size_t deleteEmptyMemoryBlocks() override; - - /// return the total available size of allocated MemoryBlocks - size_t totalAvailableSize() const override; - - /// return the total reserved size of allocated MemoryBlocks - size_t totalReservedSize() const override; - - /// return the total memory size of allocated MemoryBlocks - size_t totalMemorySize() const override; - - /// report stats about blocks of memory allocated. - void report(std::ostream& out) const override; - - /// set the MemoryTracking member of the vsg::Allocator and all the MemoryBlocks that it manages. - void setMemoryTracking(int mt) override; - - void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) override; - - protected: - struct MemoryBlock - { - MemoryBlock(size_t blockSize, int memoryTracking, size_t in_alignment); - virtual ~MemoryBlock(); - - void* allocate(std::size_t size); - bool deallocate(void* ptr, std::size_t size); - - vsg::MemorySlots memorySlots; - size_t alignment = 4; - size_t block_alignment = 16; - uint8_t* memory = nullptr; - }; - - struct MemoryBlocks - { - OriginalBlockAllocator* parent = nullptr; - std::string name; - size_t blockSize = 0; - size_t alignment = 4; - std::map> memoryBlocks; - std::shared_ptr latestMemoryBlock; - - MemoryBlocks(OriginalBlockAllocator* in_parent, const std::string& in_name, size_t in_blockSize, size_t in_alignment); - virtual ~MemoryBlocks(); - - void* allocate(std::size_t size); - bool deallocate(void* ptr, std::size_t size); - - size_t deleteEmptyMemoryBlocks(); - size_t totalAvailableSize() const; - size_t totalReservedSize() const; - size_t totalMemorySize() const; - }; - - MemoryBlocks* getMemoryBlocks(AllocatorAffinity allocatorAffinity); - - MemoryBlocks* getOrCreateMemoryBlocks(AllocatorAffinity allocatorAffinity, const std::string& name, size_t blockSize, size_t in_alignment = 4); - - double allocationTime = 0.0; - double deallocationTime = 0.0; - - protected: - std::vector> allocatorMemoryBlocks; - }; - + //////////////////////////////////////////////////////////////////////////////////////////////////// + // + // IntrusiveAllocator is the default Allocator implementation + // + // Memory is allocated in fixed sized blocks, with the indexing of allocated and available slots of memory + // stored within the same memory block that user memory allocations are made from. The memory block + // is created as a contiguous block of 4 byte Elements, where the Element is a union of a bitfield linked list + // marking the beginning of the previous slot or the beginning of the next, the status of whether the slot is + // allocated or available, or an index when used as part of the doubly linked list of free slots. + // + // The block allocation is done based on the type of object so all nodes, data or general objects are + // allocated within the blocks containing objects of similar type. This form of block allocation helps + // scene graph traversal speeds by improving cache coherency/reducing cache misses as it ensures that + // nodes etc. are packed in adjacent memory. + // + // The intrusive indexing means there is only a 4 byte penalty for each memory allocation, and a minimum + // memory use per allocation of 12 bytes (3 Elements - 1 for the slot{previous, next, status} and 2 for the
+ // + // The maximum size of allocations within the block allocation is (2^15-2) * 4, allocations larger than this + // are allocated using aligned versions of std::new and std::delete. + // class VSG_DECLSPEC IntrusiveAllocator : public Allocator { public: diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index f667a1799..b9b369940 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -22,471 +22,12 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI using namespace vsg; -vsg::Allocator* createAllocator(const char* env) -{ - if (vsg::getEnv(env) == "OLD") - return new OriginalBlockAllocator(); - else - return new IntrusiveAllocator(); -} - +static std::unique_ptr s_allocator(new IntrusiveAllocator()); std::unique_ptr& Allocator::instance() { - static std::unique_ptr s_allocator(createAllocator("VSG_ALLOCATOR")); return s_allocator; } -//////////////////////////////////////////////////////////////////////////////////////////////////// -// -// vsg::OriginalBlockAllocator -// -OriginalBlockAllocator::OriginalBlockAllocator(size_t in_default_alignment) : - Allocator(in_default_alignment) -{ - allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); - - size_t Megabyte = 1024 * 1024; - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "MemoryBlocks_OBJECTS", size_t(Megabyte), default_alignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "MemoryBlocks_DATA", size_t(16 * Megabyte), default_alignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "MemoryBlocks_NODES", size_t(Megabyte), default_alignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "MemoryBlocks_PHYSICS", size_t(Megabyte), 16)); -} - -OriginalBlockAllocator::OriginalBlockAllocator(std::unique_ptr in_nestedAllocator, size_t in_default_alignment) : - Allocator(std::move(in_nestedAllocator), in_default_alignment) -{ -} - -OriginalBlockAllocator::~OriginalBlockAllocator() -{ -} - -void OriginalBlockAllocator::report(std::ostream& out) const -{ - out << "OriginalBlockAllocator::report() " << allocatorMemoryBlocks.size() << std::endl; - out << "allocatorType = " << allocatorType << std::endl; - out << "totalAvailableSize = " << totalAvailableSize() << ", totalReservedSize = " << totalReservedSize() << ", totalMemorySize = " << totalMemorySize() << std::endl; - double totalReserved = static_cast(totalReservedSize()); - - std::scoped_lock lock(mutex); - for (const auto& memoryBlocks : allocatorMemoryBlocks) - { - if (memoryBlocks) - { - size_t totalForBlock = memoryBlocks->totalReservedSize(); - out << memoryBlocks->name << " used = " << totalForBlock; - if (totalReserved > 0.0) - { - out << ", " << (double(totalForBlock) / totalReserved) * 100.0 << "% of total used."; - } - out << std::endl; - } - } - - for (const auto& memoryBlocks : allocatorMemoryBlocks) - { - if (memoryBlocks) - { - out << memoryBlocks->name << " " << memoryBlocks->memoryBlocks.size() << " blocks"; - for (const auto& value : memoryBlocks->memoryBlocks) - { - const auto& memorySlots = value.second->memorySlots; - out << " [used = " << memorySlots.totalReservedSize() << ", avail = " << memorySlots.maximumAvailableSpace() << "]"; - } - out << std::endl; - } - } -} - -void* OriginalBlockAllocator::allocate(std::size_t size, AllocatorAffinity allocatorAffinity) -{ - std::scoped_lock lock(mutex); - - // create a MemoryBlocks entry if one 
doesn't already exist - if (allocatorAffinity > allocatorMemoryBlocks.size()) - { - if (memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) - { - info("OriginalBlockAllocator::allocate(", size, ", ", allocatorAffinity, ") out of bounds allocating new MemoryBlock"); - } - - auto name = make_string("MemoryBlocks_", allocatorAffinity); - size_t blockSize = 1024 * 1024; // Megabyte - - allocatorMemoryBlocks.resize(allocatorAffinity + 1); - allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, name, blockSize, default_alignment)); - } - - auto& memoryBlocks = allocatorMemoryBlocks[allocatorAffinity]; - if (memoryBlocks) - { - auto mem_ptr = memoryBlocks->allocate(size); - if (mem_ptr) - { - if (memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) - { - info("Allocated from MemoryBlock mem_ptr = ", mem_ptr, ", size = ", size, ", allocatorAffinity = ", int(allocatorAffinity)); - } - return mem_ptr; - } - } - - void* ptr = OriginalBlockAllocator::allocate(size, allocatorAffinity); - if (memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) - { - info("OriginalBlockAllocator::allocate(", size, ", ", int(allocatorAffinity), ") ptr = ", ptr); - } - return ptr; -} - -bool OriginalBlockAllocator::deallocate(void* ptr, std::size_t size) -{ - std::scoped_lock lock(mutex); - - for (auto& memoryBlocks : allocatorMemoryBlocks) - { - if (memoryBlocks) - { - if (memoryBlocks->deallocate(ptr, size)) - { - if (memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) - { - info("Deallocated from MemoryBlock ", ptr); - } - return true; - } - } - } - - if (nestedAllocator && nestedAllocator->deallocate(ptr, size)) return true; - - if (allocatorType == ALLOCATOR_TYPE_NEW_DELETE) - { - operator delete(ptr); - return true; - } - else if (allocatorType == ALLOCATOR_TYPE_MALLOC_FREE) - { - std::free(ptr); - return true; - } - - return false; -} - -size_t OriginalBlockAllocator::deleteEmptyMemoryBlocks() -{ - std::scoped_lock lock(mutex); - - size_t memoryDeleted = 0; - for (auto& memoryBlocks : allocatorMemoryBlocks) - { - if (memoryBlocks) memoryDeleted += memoryBlocks->deleteEmptyMemoryBlocks(); - } - return memoryDeleted; -} - -size_t OriginalBlockAllocator::totalAvailableSize() const -{ - std::scoped_lock lock(mutex); - - size_t size = 0; - for (auto& memoryBlocks : allocatorMemoryBlocks) - { - if (memoryBlocks) size += memoryBlocks->totalAvailableSize(); - } - return size; -} - -size_t OriginalBlockAllocator::totalReservedSize() const -{ - std::scoped_lock lock(mutex); - - size_t size = 0; - for (auto& memoryBlocks : allocatorMemoryBlocks) - { - if (memoryBlocks) size += memoryBlocks->totalReservedSize(); - } - return size; -} - -size_t OriginalBlockAllocator::totalMemorySize() const -{ - std::scoped_lock lock(mutex); - - size_t size = 0; - for (auto& memoryBlocks : allocatorMemoryBlocks) - { - if (memoryBlocks) size += memoryBlocks->totalMemorySize(); - } - return size; -} - -OriginalBlockAllocator::MemoryBlocks* OriginalBlockAllocator::getMemoryBlocks(AllocatorAffinity allocatorAffinity) -{ - std::scoped_lock lock(mutex); - - if (size_t(allocatorAffinity) < allocatorMemoryBlocks.size()) return allocatorMemoryBlocks[allocatorAffinity].get(); - return {}; -} - -OriginalBlockAllocator::MemoryBlocks* OriginalBlockAllocator::getOrCreateMemoryBlocks(AllocatorAffinity allocatorAffinity, const std::string& name, size_t blockSize, size_t alignment) -{ - std::scoped_lock lock(mutex); - - if (size_t(allocatorAffinity) < allocatorMemoryBlocks.size()) - { - allocatorMemoryBlocks[allocatorAffinity]->name = name; - 
allocatorMemoryBlocks[allocatorAffinity]->blockSize = blockSize; - allocatorMemoryBlocks[allocatorAffinity]->alignment = alignment; - } - else - { - allocatorMemoryBlocks.resize(allocatorAffinity + 1); - allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, name, blockSize, alignment)); - } - return allocatorMemoryBlocks[allocatorAffinity].get(); -} - -void OriginalBlockAllocator::setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) -{ - std::scoped_lock lock(mutex); - - if (size_t(allocatorAffinity) < allocatorMemoryBlocks.size()) - { - allocatorMemoryBlocks[allocatorAffinity]->blockSize = blockSize; - } - else - { - auto name = make_string("MemoryBlocks_", allocatorAffinity); - - allocatorMemoryBlocks.resize(allocatorAffinity + 1); - allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, name, blockSize, default_alignment)); - } -} - -void OriginalBlockAllocator::setMemoryTracking(int mt) -{ - memoryTracking = mt; - for (auto& amb : allocatorMemoryBlocks) - { - if (amb) - { - for (auto& value : amb->memoryBlocks) - { - value.second->memorySlots.memoryTracking = mt; - } - } - } -} - -//////////////////////////////////////////////////////////////////////////////////////////////////// -// -// vsg::OriginalBlockAllocator::MemoryBlock -// -OriginalBlockAllocator::MemoryBlock::MemoryBlock(size_t blockSize, int memoryTracking, size_t in_alignment) : - memorySlots(blockSize, memoryTracking), - alignment(in_alignment) -{ - block_alignment = std::max(alignment, alignof(std::max_align_t)); - block_alignment = std::max(block_alignment, size_t{16}); - - memory = static_cast(operator new (blockSize, std::align_val_t{block_alignment})); - - if (memorySlots.memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) - { - info("MemoryBlock(", blockSize, ") allocated memory"); - } -} - -OriginalBlockAllocator::MemoryBlock::~MemoryBlock() -{ - if (memorySlots.memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) - { - info("MemoryBlock::~MemoryBlock(", memorySlots.totalMemorySize(), ") freed memory"); - } - - operator delete (memory, std::align_val_t{block_alignment}); -} - -void* OriginalBlockAllocator::MemoryBlock::allocate(std::size_t size) -{ - auto [allocated, offset] = memorySlots.reserve(size, alignment); - if (allocated) - return memory + offset; - else - return nullptr; -} - -bool OriginalBlockAllocator::MemoryBlock::deallocate(void* ptr, std::size_t size) -{ - if (ptr >= memory) - { - size_t offset = static_cast(ptr) - memory; - if (offset < memorySlots.totalMemorySize()) - { - if (!memorySlots.release(offset, size)) - { - warn("OriginalBlockAllocator::MemoryBlock::deallocate(", ptr, ") problem - couldn't release"); - } - return true; - } - } - return false; -} - -//////////////////////////////////////////////////////////////////////////////////////////////////// -// -// vsg::OriginalBlockAllocator::MemoryBlocks -// -OriginalBlockAllocator::MemoryBlocks::MemoryBlocks(OriginalBlockAllocator* in_parent, const std::string& in_name, size_t in_blockSize, size_t in_alignment) : - parent(in_parent), - name(in_name), - blockSize(in_blockSize), - alignment(in_alignment) -{ - if (parent->memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) - { - info("OriginalBlockAllocator::MemoryBlocks::MemoryBlocks(", parent, ", ", name, ", ", blockSize, ")"); - } -} - -OriginalBlockAllocator::MemoryBlocks::~MemoryBlocks() -{ - if (parent->memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) - { - info("MemoryBlocks::~MemoryBlocks() name = ", name, ", ", memoryBlocks.size()); - } -} - -void* 
OriginalBlockAllocator::MemoryBlocks::allocate(std::size_t size) -{ - if (latestMemoryBlock) - { - auto ptr = latestMemoryBlock->allocate(size); - if (ptr) return ptr; - } - - // search existing blocks from last to first for space for the required memory allocation. - for (auto itr = memoryBlocks.rbegin(); itr != memoryBlocks.rend(); ++itr) - { - auto& block = itr->second; - if (block != latestMemoryBlock) - { - auto ptr = block->allocate(size); - if (ptr) return ptr; - } - } - - size_t new_blockSize = std::max(size, blockSize); - - auto block = std::make_shared(new_blockSize, parent->memoryTracking, alignment); - latestMemoryBlock = block; - - auto ptr = block->allocate(size); - - memoryBlocks[block->memory] = std::move(block); - - if (parent->memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) - { - info("OriginalBlockAllocator::MemoryBlocks::allocate(", size, ") MemoryBlocks.name = ", name, ", allocated in new MemoryBlock ", parent->memoryTracking); - } - - return ptr; -} - -bool OriginalBlockAllocator::MemoryBlocks::deallocate(void* ptr, std::size_t size) -{ - if (memoryBlocks.empty()) return false; - - auto itr = memoryBlocks.upper_bound(ptr); - if (itr != memoryBlocks.end()) - { - if (itr != memoryBlocks.begin()) - { - --itr; - auto& block = itr->second; - if (block->deallocate(ptr, size)) return true; - } - else - { - auto& block = itr->second; - if (block->deallocate(ptr, size)) return true; - } - } - else - { - auto& block = memoryBlocks.rbegin()->second; - if (block->deallocate(ptr, size)) return true; - } - - if (parent->memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) - { - info("MemoryBlocks:deallocate() MemoryBlocks.name = ", name, ", couldn't locate pointer to deallocate ", ptr); - } - return false; -} - -size_t OriginalBlockAllocator::MemoryBlocks::deleteEmptyMemoryBlocks() -{ - size_t memoryDeleted = 0; - if (parent->memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) - { - info("MemoryBlocks:deleteEmptyMemoryBlocks() MemoryBlocks.name = ", name); - } - - auto itr = memoryBlocks.begin(); - while (itr != memoryBlocks.end()) - { - auto& block = itr->second; - if (block->memorySlots.empty()) - { - if (parent->memoryTracking & MEMORY_TRACKING_REPORT_ACTIONS) - { - info(" MemoryBlocks:deleteEmptyMemoryBlocks() MemoryBlocks.name = ", name, ", removing MemoryBlock", block.get()); - } - if (block == latestMemoryBlock) latestMemoryBlock = nullptr; - memoryDeleted += block->memorySlots.totalMemorySize(); - itr = memoryBlocks.erase(itr); - } - else - { - ++itr; - } - } - return memoryDeleted; -} - -size_t OriginalBlockAllocator::MemoryBlocks::totalAvailableSize() const -{ - size_t size = 0; - for (auto& value : memoryBlocks) - { - size += value.second->memorySlots.totalAvailableSize(); - } - return size; -} - -size_t OriginalBlockAllocator::MemoryBlocks::totalReservedSize() const -{ - size_t size = 0; - for (auto& value : memoryBlocks) - { - size += value.second->memorySlots.totalReservedSize(); - } - return size; -} - -size_t OriginalBlockAllocator::MemoryBlocks::totalMemorySize() const -{ - size_t size = 0; - for (auto& value : memoryBlocks) - { - size += value.second->memorySlots.totalMemorySize(); - } - return size; -} - //////////////////////////////////////////////////////////////////////////////////////////////////// // // vsg::allocate and vsg::deallocate convenience functions that map to using the OriginalBlockAllocator singleton. 
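The comment block added to Allocator.h by this patch describes the slot header in prose; the standalone struct below makes it concrete. The 15/15/2 bitfield split is inferred from max_slot_size = (1 << 15) and the 2-bit status field visible elsewhere in the series, so treat the exact widths as an assumption; this is an illustration, not the library header.

    #include <cstdint>
    #include <iostream>

    // mirror of the 4-byte bookkeeping word: doubly-linked slot offsets plus a status,
    // or a raw index when the word holds a free-list link. The anonymous struct inside
    // a union follows the library's own usage (a widely supported compiler extension).
    struct Element
    {
        union
        {
            uint32_t index; // previousFree/nextFree index when the slot sits on the free list
            struct
            {
                unsigned int previous : 15; // offset (in Elements) back to the previous slot header
                unsigned int next : 15;     // offset (in Elements) forward to the next slot header
                unsigned int status : 2;    // 0 = allocated, 1 = available
            };
        };
    };

    int main()
    {
        static_assert(sizeof(Element) == 4, "slot header should be a single 32-bit word");

        // one Element of overhead per allocation; a slot parked on the free list needs
        // three Elements: the header plus the previousFree and nextFree indices
        std::cout << "per-allocation overhead:     " << sizeof(Element) << " bytes\n";
        std::cout << "minimum free-slot footprint: " << 3 * sizeof(Element) << " bytes\n";

        Element e{};
        e.previous = 0; // first slot in the block
        e.next = 3;     // next slot header three Elements away
        e.status = 1;   // available
        std::cout << "packed header word: 0x" << std::hex << e.index << "\n"; // exact value is layout-dependent
    }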
From e405b1ac93785ff01718e0a94cfd53f50862c966 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Thu, 20 Jun 2024 16:14:05 +0100 Subject: [PATCH 26/43] Removed Allocator::setMemoryTracking(..) as it's no longer appropriate for the IntrusiveAllocator. Cleaned up the case of member variable names. --- include/vsg/core/Allocator.h | 17 +++++------------ src/vsg/core/Allocator.cpp | 19 +++++++++---------- 2 files changed, 14 insertions(+), 22 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index c44b1cb87..dad3a6bd9 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -41,10 +41,10 @@ namespace vsg class VSG_DECLSPEC Allocator { public: - explicit Allocator(size_t in_default_alignment = 4) : - default_alignment(in_default_alignment) {} - explicit Allocator(std::unique_ptr in_nestedAllocator, size_t in_default_alignment = 4) : - default_alignment(in_default_alignment), nestedAllocator(std::move(in_nestedAllocator)) {} + explicit Allocator(size_t in_defaultAlignment = 4) : + defaultAlignment(in_defaultAlignment) {} + explicit Allocator(std::unique_ptr in_nestedAllocator, size_t in_defaultAlignment = 4) : + defaultAlignment(in_defaultAlignment), nestedAllocator(std::move(in_nestedAllocator)) {} virtual ~Allocator() {} /// Allocator singleton @@ -69,10 +69,6 @@ namespace vsg virtual size_t totalMemorySize() const = 0; AllocatorType allocatorType = ALLOCATOR_TYPE_VSG_ALLOCATOR; // use MemoryBlocks by default - int memoryTracking = MEMORY_TRACKING_DEFAULT; - - /// set the MemoryTracking member of the vsg::Allocator and all the MemoryBlocks that it manages. - virtual void setMemoryTracking(int mt) = 0; virtual void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) = 0; @@ -80,9 +76,7 @@ namespace vsg virtual void report(std::ostream& out) const = 0; mutable std::mutex mutex; - size_t default_alignment = 4; - double allocationTime = 0.0; - double deallocationTime = 0.0; + size_t defaultAlignment = 4; protected: // if you are assigning a custom allocator you must retain the old allocator to manage the memory it allocated and needs to delete @@ -178,7 +172,6 @@ namespace vsg size_t totalAvailableSize() const override; size_t totalReservedSize() const override; size_t totalMemorySize() const override; - void setMemoryTracking(int mt) override; void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) override; protected: diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index b9b369940..a3ae34e62 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -769,15 +769,15 @@ IntrusiveAllocator::IntrusiveAllocator(std::unique_ptr in_nestedAlloc { std::cout << "IntrusiveAllocator::IntrusiveAllocator()" << std::endl; - default_alignment = 4; + defaultAlignment = 4; size_t Megabyte = size_t(1024) * size_t(1024); size_t blockSize = size_t(1) * Megabyte; allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, defaultAlignment)); +
allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, defaultAlignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, defaultAlignment)); allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", blockSize, 16)); } @@ -799,7 +799,7 @@ void IntrusiveAllocator::setBlockSize(AllocatorAffinity allocatorAffinity, size_ auto name = vsg::make_string("MemoryBlocks_", allocatorAffinity); allocatorMemoryBlocks.resize(allocatorAffinity + 1); - allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, name, blockSize, default_alignment)); + allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, name, blockSize, defaultAlignment)); } } @@ -824,7 +824,7 @@ void* IntrusiveAllocator::allocate(std::size_t size, AllocatorAffinity allocator { size_t blockSize = 1024 * 1024; // Megabyte allocatorMemoryBlocks.resize(allocatorAffinity + 1); - allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, "MemoryBlockAffinity", blockSize, default_alignment)); + allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, "MemoryBlockAffinity", blockSize, defaultAlignment)); } void* ptr = nullptr; @@ -845,9 +845,9 @@ void* IntrusiveAllocator::allocate(std::size_t size, AllocatorAffinity allocator return ptr; } - ptr = operator new (size, std::align_val_t{default_alignment}); - if (ptr) largeAllocations[ptr] = std::pair(default_alignment, size); - //std::cout<<"IntrusiveAllocator::allocate() default aligned large allocation = "< Date: Mon, 24 Jun 2024 10:04:49 +0100 Subject: [PATCH 27/43] Reduced debug message --- src/vsg/core/Allocator.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index a3ae34e62..bf935a2e5 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -783,7 +783,7 @@ IntrusiveAllocator::IntrusiveAllocator(std::unique_ptr in_nestedAlloc IntrusiveAllocator::~IntrusiveAllocator() { - std::cout << "IntrusiveAllocator::~IntrusiveAllocator() largeAllocations.size() = " << largeAllocations.size() << std::endl; + std::cout << "IntrusiveAllocator::~IntrusiveAllocator()" << std::endl; } void IntrusiveAllocator::setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) From e2771797274f1ebaf80e34366cc17b3edeacb1d8 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Tue, 25 Jun 2024 11:01:35 +0100 Subject: [PATCH 28/43] Standardized the constructor --- include/vsg/core/Allocator.h | 3 ++- src/vsg/core/Allocator.cpp | 25 ++++++++++++++++++------- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index dad3a6bd9..a1e9664cf 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -156,7 +156,8 @@ namespace vsg class VSG_DECLSPEC IntrusiveAllocator : public Allocator { public: - IntrusiveAllocator(std::unique_ptr in_nestedAllocator = {}); + explicit IntrusiveAllocator(size_t in_default_alignment = 4); + explicit IntrusiveAllocator(std::unique_ptr in_nestedAllocator, size_t in_default_alignment = 4); ~IntrusiveAllocator(); diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index bf935a2e5..4478e4b9b 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -764,20 +764,31 @@ bool IntrusiveAllocator::MemoryBlocks::validate() const 
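Allocations larger than a block can service fall back to aligned operator new, with the alignment recorded alongside the size so that the matching aligned operator delete can be used on deallocation, as in the largeAllocations handling above. A standalone sketch of that pattern (simplified free functions for illustration, not the class itself):

    #include <cstddef>
    #include <map>
    #include <new>
    #include <utility>

    // ptr -> {alignment, size}, mirroring the largeAllocations bookkeeping
    std::map<void*, std::pair<std::size_t, std::size_t>> largeAllocations;

    void* allocateLarge(std::size_t size, std::size_t alignment)
    {
        void* ptr = operator new(size, std::align_val_t{alignment});
        if (ptr) largeAllocations[ptr] = {alignment, size};
        return ptr;
    }

    bool deallocateLarge(void* ptr)
    {
        auto itr = largeAllocations.find(ptr);
        if (itr == largeAllocations.end()) return false;

        // the aligned form of operator delete must match the aligned operator new used above
        operator delete(ptr, std::align_val_t{itr->second.first});
        largeAllocations.erase(itr);
        return true;
    }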
// // IntrusiveAllocator // -IntrusiveAllocator::IntrusiveAllocator(std::unique_ptr in_nestedAllocator) : - Allocator(std::move(in_nestedAllocator)) +IntrusiveAllocator::IntrusiveAllocator(size_t in_default_alignment) : + Allocator(in_default_alignment) { - std::cout << "IntrusiveAllocator::IntrusiveAllocator()" << std::endl; + size_t Megabyte = size_t(1024) * size_t(1024); + size_t blockSize = size_t(1) * Megabyte; - defaultAlignment = 4; + allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", blockSize, 16)); +} + +IntrusiveAllocator::IntrusiveAllocator(std::unique_ptr in_nestedAllocator, size_t in_default_alignment) : + Allocator(std::move(in_nestedAllocator), in_default_alignment) +{ + std::cout << "IntrusiveAllocator::IntrusiveAllocator()" << std::endl; size_t Megabyte = size_t(1024) * size_t(1024); size_t blockSize = size_t(1) * Megabyte; allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, defaultAlignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, defaultAlignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, defaultAlignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, default_alignment)); allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", blockSize, 16)); } From 123a47d18b740ecd35ba3ddb1ae0282d53220b83 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Tue, 25 Jun 2024 11:19:24 +0100 Subject: [PATCH 29/43] Fixed naming --- include/vsg/core/Allocator.h | 4 ++-- src/vsg/core/Allocator.cpp | 20 ++++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index a1e9664cf..61fd17be0 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -156,8 +156,8 @@ namespace vsg class VSG_DECLSPEC IntrusiveAllocator : public Allocator { public: - explicit IntrusiveAllocator(size_t in_default_alignment = 4); - explicit IntrusiveAllocator(std::unique_ptr in_nestedAllocator, size_t in_default_alignment = 4); + explicit IntrusiveAllocator(size_t in_defaultAlignment = 4); + explicit IntrusiveAllocator(std::unique_ptr in_nestedAllocator, size_t in_defaultAlignment = 4); ~IntrusiveAllocator(); diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index 4478e4b9b..5aef6952a 100644 
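Both constructors above set up the same per-affinity blocks; the nested form is intended for replacing the global allocator while keeping the previous allocator alive so that memory it handed out can still be released through it. A usage sketch under that assumption (the 16-byte alignment and function name are illustrative; at this point in the series IntrusiveAllocator is still declared in Allocator.h):

    #include <vsg/core/Allocator.h>

    void installIntrusiveAllocator()
    {
        // pass the previous allocator in as the nested allocator so allocations it made
        // can still be deallocated, then take over the singleton slot
        auto previous = std::move(vsg::Allocator::instance());
        vsg::Allocator::instance().reset(new vsg::IntrusiveAllocator(std::move(previous), 16));
    }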
--- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -764,21 +764,21 @@ bool IntrusiveAllocator::MemoryBlocks::validate() const // // IntrusiveAllocator // -IntrusiveAllocator::IntrusiveAllocator(size_t in_default_alignment) : - Allocator(in_default_alignment) +IntrusiveAllocator::IntrusiveAllocator(size_t in_defaultAlignment) : + Allocator(in_defaultAlignment) { size_t Megabyte = size_t(1024) * size_t(1024); size_t blockSize = size_t(1) * Megabyte; allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, default_alignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, default_alignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, defaultAlignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, defaultAlignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, defaultAlignment)); allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", blockSize, 16)); } -IntrusiveAllocator::IntrusiveAllocator(std::unique_ptr in_nestedAllocator, size_t in_default_alignment) : - Allocator(std::move(in_nestedAllocator), in_default_alignment) +IntrusiveAllocator::IntrusiveAllocator(std::unique_ptr in_nestedAllocator, size_t in_defaultAlignment) : + Allocator(std::move(in_nestedAllocator), in_defaultAlignment) { std::cout << "IntrusiveAllocator::IntrusiveAllocator()" << std::endl; @@ -786,9 +786,9 @@ IntrusiveAllocator::IntrusiveAllocator(std::unique_ptr in_nestedAlloc size_t blockSize = size_t(1) * Megabyte; allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, default_alignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, default_alignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, default_alignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, defaultAlignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, defaultAlignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, defaultAlignment)); allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", blockSize, 16)); } From 55bde312681ceb86b1ef5aa4588c88514c0b6c7d Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Tue, 25 Jun 2024 15:27:57 +0100 Subject: [PATCH 30/43] Removed debug messages --- src/vsg/core/Allocator.cpp | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index 5aef6952a..3983da26c 100644 --- 
a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -780,8 +780,6 @@ IntrusiveAllocator::IntrusiveAllocator(size_t in_defaultAlignment) : IntrusiveAllocator::IntrusiveAllocator(std::unique_ptr in_nestedAllocator, size_t in_defaultAlignment) : Allocator(std::move(in_nestedAllocator), in_defaultAlignment) { - std::cout << "IntrusiveAllocator::IntrusiveAllocator()" << std::endl; - size_t Megabyte = size_t(1024) * size_t(1024); size_t blockSize = size_t(1) * Megabyte; @@ -794,7 +792,6 @@ IntrusiveAllocator::IntrusiveAllocator(std::unique_ptr in_nestedAlloc IntrusiveAllocator::~IntrusiveAllocator() { - std::cout << "IntrusiveAllocator::~IntrusiveAllocator()" << std::endl; } void IntrusiveAllocator::setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) From fc756e10cc483a82ae1ffcbfa0ec777287a733ad Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Wed, 26 Jun 2024 16:52:07 +0100 Subject: [PATCH 31/43] Implemented missing methods --- include/vsg/core/Allocator.h | 10 +++ src/vsg/core/Allocator.cpp | 142 +++++++++++++++++++++++++++++++---- 2 files changed, 139 insertions(+), 13 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index 61fd17be0..c0fd5f0cf 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -234,6 +234,7 @@ namespace vsg size_t blockAlignment = 16; size_t blockSize = 0; size_t maximumAllocationSize = 0; + size_t firstSlot = 1; std::vector freeLists; @@ -243,6 +244,10 @@ namespace vsg inline bool within(void* ptr) const { return memory <= ptr && ptr < memoryEnd; } + size_t totalAvailableSize() const; + size_t totalReservedSize() const; + size_t totalMemorySize() const; + struct SlotTester { SlotTester(Element* in_mem, size_t in_head) : @@ -290,6 +295,11 @@ namespace vsg void* allocate(std::size_t size); void report(std::ostream& out) const; bool validate() const; + + size_t deleteEmptyMemoryBlocks(); + size_t totalAvailableSize() const; + size_t totalReservedSize() const; + size_t totalMemorySize() const; }; std::vector> allocatorMemoryBlocks; diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index 3983da26c..a56dc1af0 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -70,6 +70,7 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t memory = static_cast(operator new (blockSize, std::align_val_t{blockAlignment})); memoryEnd = memory + blockSize / sizeof(Element); capacity = blockSize / alignment; + firstSlot = static_cast(((1 + elementAlignment) / elementAlignment) * elementAlignment - 1); size_t max_slot_size = (1 << 15); @@ -80,14 +81,14 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t freeLists.emplace_back(); FreeList& freeList = freeLists.front(); freeList.count = 0; - freeList.head = static_cast(((1 + elementAlignment) / elementAlignment) * elementAlignment - 1); + freeList.head = firstSlot; maximumAllocationSize = computeMaxiumAllocationSize(blockSize, alignment); // mark the first element as 0. 
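The firstSlot value introduced above positions the first free slot so that the first user pointer handed out, &memory[firstSlot + 1], lands on an aligned boundary. A small sketch of the same arithmetic with worked values, assuming 4-byte Elements and an alignment that is a multiple of 4:

    #include <cstddef>

    constexpr std::size_t firstSlotFor(std::size_t alignment)
    {
        const std::size_t elementAlignment = alignment / 4; // alignment expressed in 4-byte Elements
        return ((1 + elementAlignment) / elementAlignment) * elementAlignment - 1;
    }

    // default 4-byte alignment: firstSlot = 1, so user memory starts at element 2 (byte offset 8)
    static_assert(firstSlotFor(4) == 1);
    // 16-byte alignment: firstSlot = 3, so user memory starts at element 4 (byte offset 16)
    static_assert(firstSlotFor(16) == 3);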
memory[0].index = 0; size_t previous_position = 0; // 0 marks the beginning of the free list - size_t position = freeList.head; + size_t position = firstSlot; for (; position < capacity;) { size_t aligned_start = ((position + max_slot_size) / elementAlignment) * elementAlignment; @@ -106,7 +107,8 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t std::cout << "blockSize = " << blockSize << std::endl; std::cout << "capacity = " << capacity << std::endl; - + std::cout << "totalReservedSize = " << totalReservedSize() << std::endl; + std::cout << "totalAvailableSize = " << totalAvailableSize() << std::endl; std::cout << "alignment = " << alignment << std::endl; std::cout << "elementAlignment = " << elementAlignment << std::endl; std::cout << "freeList.head = " << freeList.head << std::endl; @@ -557,8 +559,11 @@ void IntrusiveAllocator::MemoryBlock::report(std::ostream& out) const out << " blockAlignment = " << blockAlignment << std::endl; out << " blockSize = " << blockSize << ", memory = " << static_cast(memory) << std::endl; out << " maximumAllocationSize = " << maximumAllocationSize << std::endl; + out << " firstSlot = "<< firstSlot<deleteEmptyMemoryBlocks(); + } + return count; } + size_t IntrusiveAllocator::totalAvailableSize() const { - vsg::info("IntrusiveAllocator::totalAvailableSize(..) TODO"); - return 0; + size_t count = 0; + for(auto& blocks : allocatorMemoryBlocks) + { + count += blocks->totalAvailableSize(); + } + return count; } + size_t IntrusiveAllocator::totalReservedSize() const { - vsg::info("IntrusiveAllocator::totalReservedSize(..) TODO"); - return 0; + size_t count = 0; + for(auto& blocks : allocatorMemoryBlocks) + { + count += blocks->totalReservedSize(); + } + return count; } + size_t IntrusiveAllocator::totalMemorySize() const { - vsg::info("IntrusiveAllocator::totalMemorySize(..) 
TODO"); - return 0; + size_t count = 0; + for(auto& blocks : allocatorMemoryBlocks) + { + count += blocks->totalMemorySize(); + } + return count; } From 8f4b1bc7a5173277761e7bbaccb91f26ab0906bb Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Wed, 26 Jun 2024 18:22:53 +0100 Subject: [PATCH 32/43] Added observer_ptr to Device to enable sharing between Context when compiling --- include/vsg/vk/Device.h | 5 +++++ include/vsg/vk/MemoryBufferPools.h | 1 + src/vsg/vk/Context.cpp | 25 +++++++++++++++++++++++-- src/vsg/vk/Device.cpp | 1 + 4 files changed, 30 insertions(+), 2 deletions(-) diff --git a/include/vsg/vk/Device.h b/include/vsg/vk/Device.h index be78f4b98..e2f7772c1 100644 --- a/include/vsg/vk/Device.h +++ b/include/vsg/vk/Device.h @@ -23,6 +23,7 @@ namespace vsg // forward declare class WindowTraits; + class MemoryBufferPools; struct QueueSetting { @@ -82,6 +83,10 @@ namespace vsg /// return true if Device was created with specified extension bool supportsDeviceExtension(const char* extensionName) const; + // provide observer_ptr to memory buffer pools so that these can be accessed when required + observer_ptr deviceMemoryBufferPools; + observer_ptr stagingMemoryBufferPools; + protected: virtual ~Device(); diff --git a/include/vsg/vk/MemoryBufferPools.h b/include/vsg/vk/MemoryBufferPools.h index 329672a4f..7f5911869 100644 --- a/include/vsg/vk/MemoryBufferPools.h +++ b/include/vsg/vk/MemoryBufferPools.h @@ -54,5 +54,6 @@ namespace vsg using BufferPools = std::vector>; BufferPools bufferPools; }; + VSG_type_name(vsg::MemoryBufferPools); } // namespace vsg diff --git a/src/vsg/vk/Context.cpp b/src/vsg/vk/Context.cpp index b07a517ba..3413fea2b 100644 --- a/src/vsg/vk/Context.cpp +++ b/src/vsg/vk/Context.cpp @@ -89,8 +89,6 @@ Context::Context(Device* in_device, const ResourceRequirements& in_resourceRequi deviceID(in_device->deviceID), device(in_device), resourceRequirements(in_resourceRequirements), - deviceMemoryBufferPools(MemoryBufferPools::create("Device_MemoryBufferPool", device, in_resourceRequirements)), - stagingMemoryBufferPools(MemoryBufferPools::create("Staging_MemoryBufferPool", device, in_resourceRequirements)), scratchBufferSize(0) { //semaphore = vsg::Semaphore::create(device); @@ -98,6 +96,29 @@ Context::Context(Device* in_device, const ResourceRequirements& in_resourceRequi minimum_maxSets = in_resourceRequirements.computeNumDescriptorSets(); minimum_descriptorPoolSizes = in_resourceRequirements.computeDescriptorPoolSizes(); + + deviceMemoryBufferPools = device->deviceMemoryBufferPools.ref_ptr(); + stagingMemoryBufferPools = device->stagingMemoryBufferPools.ref_ptr(); + + if (!deviceMemoryBufferPools) + { + device->deviceMemoryBufferPools = deviceMemoryBufferPools = MemoryBufferPools::create("Device_MemoryBufferPool", device, in_resourceRequirements); + vsg::info("Context::Context() creating new deviceMemoryBufferPools = ", deviceMemoryBufferPools); + } + else + { + vsg::info("Context::Context() reusing deviceMemoryBufferPools = ", deviceMemoryBufferPools); + } + + if (!stagingMemoryBufferPools) + { + device->stagingMemoryBufferPools = stagingMemoryBufferPools = MemoryBufferPools::create("Staging_MemoryBufferPool", device, in_resourceRequirements); + vsg::info("Context::Context() creating new stagingMemoryBufferPools = ", stagingMemoryBufferPools); + } + else + { + vsg::info("Context::Context() reusing stagingMemoryBufferPools = ", stagingMemoryBufferPools); + } } Context::Context(const Context& context) : diff --git a/src/vsg/vk/Device.cpp b/src/vsg/vk/Device.cpp index 
85ae23ab8..96f9fe31b 100644 --- a/src/vsg/vk/Device.cpp +++ b/src/vsg/vk/Device.cpp @@ -15,6 +15,7 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI #include #include #include +#include #include #include From 063baf621ca9c9cf98b1121641e88af0f28f8087 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Thu, 27 Jun 2024 17:54:21 +0100 Subject: [PATCH 33/43] Warning fixes --- include/vsg/core/Allocator.h | 6 +++--- src/vsg/core/Allocator.cpp | 15 ++++++++------- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index c0fd5f0cf..d31511654 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -227,14 +227,14 @@ namespace vsg Element* memory = nullptr; Element* memoryEnd = nullptr; - size_t capacity = 0; size_t alignment = 4; // min aligment is 4 { sizeof(Element) } - size_t elementAlignment = 1; size_t blockAlignment = 16; size_t blockSize = 0; size_t maximumAllocationSize = 0; - size_t firstSlot = 1; + Element::Index elementAlignment = 1; + Element::Index firstSlot = 1; + Element::Index capacity = 0; std::vector freeLists; diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index a56dc1af0..00e4e5637 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -59,7 +59,7 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t blockSize(in_blockSize) { alignment = std::max(alignment, sizeof(Element)); // we need to be a multiple of sizeof(value_type) - elementAlignment = alignment / sizeof(Element); + elementAlignment = static_cast(alignment / sizeof(Element)); blockAlignment = std::max(alignment, alignof(std::max_align_t)); blockAlignment = std::max(blockAlignment, size_t{16}); @@ -69,13 +69,14 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t memory = static_cast(operator new (blockSize, std::align_val_t{blockAlignment})); memoryEnd = memory + blockSize / sizeof(Element); - capacity = blockSize / alignment; + capacity = static_cast(blockSize / alignment); firstSlot = static_cast(((1 + elementAlignment) / elementAlignment) * elementAlignment - 1); - size_t max_slot_size = (1 << 15); + Element::Index max_slot_size = (1 << 15); // // vsg::debug(" capacity = ", capacity, ", max_slot_size = ", max_slot_size); + // set up the free tracking to encompass the whole buffer // start at element before the first aligned element so that position 0 can be used to mark beginning or end of free lists freeLists.emplace_back(); @@ -87,12 +88,12 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t // mark the first element as 0. memory[0].index = 0; - size_t previous_position = 0; // 0 marks the beginning of the free list - size_t position = firstSlot; + Element::Index previous_position = 0; // 0 marks the beginning of the free list + Element::Index position = firstSlot; for (; position < capacity;) { - size_t aligned_start = ((position + max_slot_size) / elementAlignment) * elementAlignment; - size_t next_position = std::min(aligned_start - 1, capacity); + Element::Index aligned_start = ((position + max_slot_size) / elementAlignment) * elementAlignment; + Element::Index next_position = std::min(aligned_start - 1, capacity); memory[position] = Element{(previous_position == 0) ? 
0 : (position - previous_position), next_position - position, 1}; memory[position + 1].index = static_cast(previous_position); From fd2f6518584d0acb8c02aa2c9c8dddd869413606 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Thu, 27 Jun 2024 19:34:37 +0100 Subject: [PATCH 34/43] Added vsg::getActiveDeviceMemoryList(..) method to help with debugging how much memory has been allocated etc. --- include/vsg/vk/DeviceMemory.h | 4 ++++ src/vsg/vk/DeviceMemory.cpp | 44 +++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/include/vsg/vk/DeviceMemory.h b/include/vsg/vk/DeviceMemory.h index 0cf45afed..f98013aa9 100644 --- a/include/vsg/vk/DeviceMemory.h +++ b/include/vsg/vk/DeviceMemory.h @@ -49,6 +49,7 @@ namespace vsg VkDeviceSize maximumAvailableSpace() const; size_t totalAvailableSize() const; size_t totalReservedSize() const; + size_t totalMemorySize() const; Device* getDevice() { return _device; } const Device* getDevice() const { return _device; } @@ -66,6 +67,9 @@ namespace vsg }; VSG_type_name(vsg::DeviceMemory); + using DeviceMemoryList = std::list>; + extern VSG_DECLSPEC DeviceMemoryList getActiveDeviceMemoryList(VkMemoryPropertyFlagBits properyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT); + template class MappedData : public T { diff --git a/src/vsg/vk/DeviceMemory.cpp b/src/vsg/vk/DeviceMemory.cpp index d8438ff95..dc139fd2b 100644 --- a/src/vsg/vk/DeviceMemory.cpp +++ b/src/vsg/vk/DeviceMemory.cpp @@ -21,6 +21,24 @@ using namespace vsg; #define DO_CHECK 0 +static std::mutex s_DeviceMemoryListMutex; +static std::list> s_DeviceMemoryList; + +DeviceMemoryList vsg::getActiveDeviceMemoryList(VkMemoryPropertyFlagBits properyFlags) +{ + std::scoped_lock lock(s_DeviceMemoryListMutex); + DeviceMemoryList dml; + for(auto& dm : s_DeviceMemoryList) + { + auto dm_ref_ptr = dm.ref_ptr(); + if ((dm_ref_ptr->getMemoryPropertyFlags() & propertyFlags) != 0) + { + dml.push_back(dm_ref_ptr); + } + } + return dml; +} + /////////////////////////////////////////////////////////////////////////////// // // DeviceMemory @@ -72,6 +90,12 @@ DeviceMemory::DeviceMemory(Device* device, const VkMemoryRequirements& memRequir { throw Exception{"Error: Failed to allocate DeviceMemory.", result}; } + + { + std::scoped_lock lock(s_DeviceMemoryListMutex); + s_DeviceMemoryList.emplace_back(this); + vsg::info("DeviceMemory::DeviceMemory() added to s_DeviceMemoryList, s_DeviceMemoryList.size() = ", s_DeviceMemoryList.size()); + } } DeviceMemory::~DeviceMemory() @@ -84,6 +108,20 @@ DeviceMemory::~DeviceMemory() vkFreeMemory(*_device, _deviceMemory, _device->getAllocationCallbacks()); } + + { + std::scoped_lock lock(s_DeviceMemoryListMutex); + auto itr = std::find(s_DeviceMemoryList.begin(), s_DeviceMemoryList.end(), this); + if (itr != s_DeviceMemoryList.end()) + { + s_DeviceMemoryList.erase(itr); + vsg::info("DeviceMemory::~DeviceMemory() removed from s_DeviceMemoryList, s_DeviceMemoryList.size() = ", s_DeviceMemoryList.size()); + } + else + { + vsg::warn("DeviceMemory::~DeviceMemory() could not find in s_DeviceMemoryList"); + } + } } VkResult DeviceMemory::map(VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) @@ -148,3 +186,9 @@ size_t DeviceMemory::totalReservedSize() const std::scoped_lock lock(_mutex); return _memorySlots.totalReservedSize(); } + +size_t DeviceMemory::totalMemorySize() const +{ + return _memorySlots.totalMemorySize(); +} + From 70a54a20b599cdbfe31b432f46174b5fb87847d3 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Thu, 27 Jun 2024 19:36:20 
+0100 Subject: [PATCH 35/43] Fixed typo --- include/vsg/vk/DeviceMemory.h | 2 +- src/vsg/vk/DeviceMemory.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/include/vsg/vk/DeviceMemory.h b/include/vsg/vk/DeviceMemory.h index f98013aa9..5304723a5 100644 --- a/include/vsg/vk/DeviceMemory.h +++ b/include/vsg/vk/DeviceMemory.h @@ -68,7 +68,7 @@ namespace vsg VSG_type_name(vsg::DeviceMemory); using DeviceMemoryList = std::list>; - extern VSG_DECLSPEC DeviceMemoryList getActiveDeviceMemoryList(VkMemoryPropertyFlagBits properyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT); + extern VSG_DECLSPEC DeviceMemoryList getActiveDeviceMemoryList(VkMemoryPropertyFlagBits propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT); template class MappedData : public T diff --git a/src/vsg/vk/DeviceMemory.cpp b/src/vsg/vk/DeviceMemory.cpp index dc139fd2b..f62d4ed51 100644 --- a/src/vsg/vk/DeviceMemory.cpp +++ b/src/vsg/vk/DeviceMemory.cpp @@ -24,7 +24,7 @@ using namespace vsg; static std::mutex s_DeviceMemoryListMutex; static std::list> s_DeviceMemoryList; -DeviceMemoryList vsg::getActiveDeviceMemoryList(VkMemoryPropertyFlagBits properyFlags) +DeviceMemoryList vsg::getActiveDeviceMemoryList(VkMemoryPropertyFlagBits propertyFlags) { std::scoped_lock lock(s_DeviceMemoryListMutex); DeviceMemoryList dml; From 6db8ca0ffee4901997de3eaf0041c45c621220d5 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Mon, 1 Jul 2024 17:33:58 +0100 Subject: [PATCH 36/43] Added exports to enable subclassing of InstrusiveAllocator under Windows --- include/vsg/core/Allocator.h | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index d31511654..db7691ce2 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -176,7 +176,7 @@ namespace vsg void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) override; protected: - struct MemoryBlock + struct VSG_DECLSPEC MemoryBlock { MemoryBlock(const std::string& in_name, size_t in_blockSize, size_t in_alignment); virtual ~MemoryBlock(); @@ -248,7 +248,7 @@ namespace vsg size_t totalReservedSize() const; size_t totalMemorySize() const; - struct SlotTester + struct VSG_DECLSPEC SlotTester { SlotTester(Element* in_mem, size_t in_head) : mem(in_mem), head(in_head){}; @@ -278,7 +278,7 @@ namespace vsg } }; - class MemoryBlocks + class VSG_DECLSPEC MemoryBlocks { public: MemoryBlocks(IntrusiveAllocator* in_parent, const std::string& in_name, size_t in_blockSize, size_t in_alignment); From 1134a386510024d7bce411d8c0d775d75e1bd189 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Mon, 1 Jul 2024 18:41:58 +0100 Subject: [PATCH 37/43] Moved IntrusiveAllocator into it's own header/source file --- include/vsg/core/Allocator.h | 180 +---- include/vsg/core/IntrusiveAllocator.h | 198 +++++ include/vsg/vk/DeviceMemory.h | 1 + src/vsg/CMakeLists.txt | 1 + src/vsg/core/Allocator.cpp | 1021 +----------------------- src/vsg/core/IntrusiveAllocator.cpp | 1042 +++++++++++++++++++++++++ src/vsg/core/MemorySlots.cpp | 2 +- src/vsg/io/Path.cpp | 1 + 8 files changed, 1247 insertions(+), 1199 deletions(-) create mode 100644 include/vsg/core/IntrusiveAllocator.h create mode 100644 src/vsg/core/IntrusiveAllocator.cpp diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index db7691ce2..d0ad0d997 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -12,11 +12,10 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 
IMPLI */ -#include +#include -#include #include -#include +#include namespace vsg { @@ -131,180 +130,5 @@ namespace vsg template using allocator_affinity_physics = allocator_affinity_adapter; - //////////////////////////////////////////////////////////////////////////////////////////////////// - // - // InstrusiveAllocator is the default Allocator implenentation - // - // Memory is allocated for fixed sized blocks, with indexing of allocated and available slots of memory - // are stored within the same memory block that user memory allocation are made from. The memory block - // is created a contiguous block of 4 bytes Elements, where the Element is a union of bitfield linked list - // market the beginning of the previous slot or the begging of the next, the status of whether the slot is - // allocated or available, or an index when used as part of doubling linked list of free slots. - // - // The block allocation is done based on the type of object so all nodes, data or general objects are - // allocated within the blocks containing objects of similar type. This form of block allocation helps - // scene graph traversal speeds by improving cache coherency/reducing cache missing as it ensures that - // nodes etc. are packed in adjacent memory. - // - // The instrusive indexing means there is only a 4 byte panalty for each memory allocation, and a minimum - // memory use per allocation of 12 bytes (3 Elements - 1 for the slot{previous, next, status} and 2 for the - // previous and next free list indices. - // - // The maximum size of allocations within the block allocation is (2^15-2) * 4, allocations larger than this - // are allocated using aligned versions of std::new and std::delete. - // - class VSG_DECLSPEC IntrusiveAllocator : public Allocator - { - public: - explicit IntrusiveAllocator(size_t in_defaultAlignment = 4); - explicit IntrusiveAllocator(std::unique_ptr in_nestedAllocator, size_t in_defaultAlignment = 4); - - ~IntrusiveAllocator(); - - void report(std::ostream& out) const override; - - void* allocate(std::size_t size, AllocatorAffinity allocatorAffinity = ALLOCATOR_AFFINITY_OBJECTS) override; - - bool deallocate(void* ptr, std::size_t size) override; - - bool validate() const; - - size_t deleteEmptyMemoryBlocks() override; - size_t totalAvailableSize() const override; - size_t totalReservedSize() const override; - size_t totalMemorySize() const override; - void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) override; - - protected: - struct VSG_DECLSPEC MemoryBlock - { - MemoryBlock(const std::string& in_name, size_t in_blockSize, size_t in_alignment); - virtual ~MemoryBlock(); - - std::string name; - - void* allocate(std::size_t size); - bool deallocate(void* ptr, std::size_t size); - - void report(std::ostream& out) const; - - // bitfield packing of doubly-linked with status field into a 4 byte word - struct Element - { - union - { - uint32_t index; - - struct - { - unsigned int previous : 15; - unsigned int next : 15; - unsigned int status : 2; - }; - }; - - using Offset = decltype(previous); - using Status = decltype(status); - using Index = decltype(index); - - Element(size_t in_index) : - index(static_cast(in_index)) {} - - Element(size_t in_previous, size_t in_next, unsigned int in_status) : - previous(static_cast(in_previous)), - next(static_cast(in_next)), - status(in_status) {} - - Element() = default; - Element(const Element&) = default; - }; - - struct FreeList - { - Element::Index count = 0; - Element::Index head = 0; - }; - - Element* 
memory = nullptr; - Element* memoryEnd = nullptr; - - size_t alignment = 4; // min aligment is 4 { sizeof(Element) } - size_t blockAlignment = 16; - size_t blockSize = 0; - size_t maximumAllocationSize = 0; - Element::Index elementAlignment = 1; - Element::Index firstSlot = 1; - Element::Index capacity = 0; - - std::vector freeLists; - - bool validate() const; - - bool freeSlotsAvaible(size_t size) const; - - inline bool within(void* ptr) const { return memory <= ptr && ptr < memoryEnd; } - - size_t totalAvailableSize() const; - size_t totalReservedSize() const; - size_t totalMemorySize() const; - - struct VSG_DECLSPEC SlotTester - { - SlotTester(Element* in_mem, size_t in_head) : - mem(in_mem), head(in_head){}; - - const Element* mem = nullptr; - size_t head = 0; - - struct Entry - { - std::string name; - size_t position; - Element slot; - size_t previousFree; - size_t nextFree; - }; - - std::list elements; - - void slot(size_t position, const std::string& name); - - void report(std::ostream& out); - }; - - static inline size_t computeMaxiumAllocationSize(size_t blockSize, size_t alignment) - { - return std::min(blockSize - alignment, size_t((1 << 15) - 2) * sizeof(Element)); - } - }; - - class VSG_DECLSPEC MemoryBlocks - { - public: - MemoryBlocks(IntrusiveAllocator* in_parent, const std::string& in_name, size_t in_blockSize, size_t in_alignment); - virtual ~MemoryBlocks(); - - IntrusiveAllocator* parent = nullptr; - std::string name; - size_t alignment = 4; - size_t blockSize = 0; - size_t maximumAllocationSize = 0; - std::vector> memoryBlocks; - std::shared_ptr memoryBlockWithSpace; - - void* allocate(std::size_t size); - void report(std::ostream& out) const; - bool validate() const; - - size_t deleteEmptyMemoryBlocks(); - size_t totalAvailableSize() const; - size_t totalReservedSize() const; - size_t totalMemorySize() const; - }; - - std::vector> allocatorMemoryBlocks; - std::map> memoryBlocks; - std::map> largeAllocations; - }; } // namespace vsg diff --git a/include/vsg/core/IntrusiveAllocator.h b/include/vsg/core/IntrusiveAllocator.h new file mode 100644 index 000000000..e02c0b403 --- /dev/null +++ b/include/vsg/core/IntrusiveAllocator.h @@ -0,0 +1,198 @@ +#pragma once + +/* + +Copyright(c) 2024 Robert Osfield + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ + */ + +#include + +#include +#include + +namespace vsg +{ + //////////////////////////////////////////////////////////////////////////////////////////////////// + // + // InstrusiveAllocator is the default Allocator implenentation + // + // Memory is allocated for fixed sized blocks, with indexing of allocated and available slots of memory + // are stored within the same memory block that user memory allocation are made from. The memory block + // is created a contiguous block of 4 bytes Elements, where the Element is a union of bitfield linked list + // market the beginning of the previous slot or the begging of the next, the status of whether the slot is + // allocated or available, or an index when used as part of doubling linked list of free slots. + // + // The block allocation is done based on the type of object so all nodes, data or general objects are + // allocated within the blocks containing objects of similar type. This form of block allocation helps + // scene graph traversal speeds by improving cache coherency/reducing cache missing as it ensures that + // nodes etc. are packed in adjacent memory. + // + // The instrusive indexing means there is only a 4 byte panalty for each memory allocation, and a minimum + // memory use per allocation of 12 bytes (3 Elements - 1 for the slot{previous, next, status} and 2 for the + // previous and next free list indices. + // + // The maximum size of allocations within the block allocation is (2^15-2) * 4, allocations larger than this + // are allocated using aligned versions of std::new and std::delete. + // + class VSG_DECLSPEC IntrusiveAllocator : public Allocator + { + public: + explicit IntrusiveAllocator(size_t in_defaultAlignment = 4); + explicit IntrusiveAllocator(std::unique_ptr in_nestedAllocator, size_t in_defaultAlignment = 4); + + ~IntrusiveAllocator(); + + void report(std::ostream& out) const override; + + void* allocate(std::size_t size, AllocatorAffinity allocatorAffinity = ALLOCATOR_AFFINITY_OBJECTS) override; + + bool deallocate(void* ptr, std::size_t size) override; + + bool validate() const; + + size_t deleteEmptyMemoryBlocks() override; + size_t totalAvailableSize() const override; + size_t totalReservedSize() const override; + size_t totalMemorySize() const override; + void setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) override; + + protected: + struct VSG_DECLSPEC MemoryBlock + { + MemoryBlock(const std::string& in_name, size_t in_blockSize, size_t in_alignment); + virtual ~MemoryBlock(); + + std::string name; + + void* allocate(std::size_t size); + bool deallocate(void* ptr, std::size_t size); + + void report(std::ostream& out) const; + + // bitfield packing of doubly-linked with status field into a 4 byte word + struct Element + { + union + { + uint32_t index; + + struct + { + unsigned int previous : 15; + unsigned int next : 15; + unsigned int status : 2; + }; + }; + + using Offset = decltype(previous); + using Status = decltype(status); + using Index = decltype(index); + + Element(size_t in_index) : + index(static_cast(in_index)) {} + + Element(size_t in_previous, size_t in_next, unsigned int in_status) : + previous(static_cast(in_previous)), + next(static_cast(in_next)), + status(in_status) {} + + Element() = default; + Element(const Element&) = default; + }; + + struct FreeList + { + Element::Index count = 0; + Element::Index head = 0; + }; + + Element* memory = nullptr; + Element* memoryEnd = nullptr; + + size_t alignment = 4; // min aligment is 4 { sizeof(Element) } + size_t 
blockAlignment = 16; + size_t blockSize = 0; + size_t maximumAllocationSize = 0; + Element::Index elementAlignment = 1; + Element::Index firstSlot = 1; + Element::Index capacity = 0; + + std::vector freeLists; + + bool validate() const; + + bool freeSlotsAvaible(size_t size) const; + + inline bool within(void* ptr) const { return memory <= ptr && ptr < memoryEnd; } + + size_t totalAvailableSize() const; + size_t totalReservedSize() const; + size_t totalMemorySize() const; + + struct VSG_DECLSPEC SlotTester + { + SlotTester(Element* in_mem, size_t in_head) : + mem(in_mem), head(in_head){}; + + const Element* mem = nullptr; + size_t head = 0; + + struct Entry + { + std::string name; + size_t position; + Element slot; + size_t previousFree; + size_t nextFree; + }; + + std::list elements; + + void slot(size_t position, const std::string& name); + + void report(std::ostream& out); + }; + + static inline size_t computeMaxiumAllocationSize(size_t blockSize, size_t alignment) + { + return std::min(blockSize - alignment, size_t((1 << 15) - 2) * sizeof(Element)); + } + }; + + class VSG_DECLSPEC MemoryBlocks + { + public: + MemoryBlocks(IntrusiveAllocator* in_parent, const std::string& in_name, size_t in_blockSize, size_t in_alignment); + virtual ~MemoryBlocks(); + + IntrusiveAllocator* parent = nullptr; + std::string name; + size_t alignment = 4; + size_t blockSize = 0; + size_t maximumAllocationSize = 0; + std::vector> memoryBlocks; + std::shared_ptr memoryBlockWithSpace; + + void* allocate(std::size_t size); + void report(std::ostream& out) const; + bool validate() const; + + size_t deleteEmptyMemoryBlocks(); + size_t totalAvailableSize() const; + size_t totalReservedSize() const; + size_t totalMemorySize() const; + }; + + std::vector> allocatorMemoryBlocks; + std::map> memoryBlocks; + std::map> largeAllocations; + }; + +} // namespace vsg diff --git a/include/vsg/vk/DeviceMemory.h b/include/vsg/vk/DeviceMemory.h index 5304723a5..ebd86590c 100644 --- a/include/vsg/vk/DeviceMemory.h +++ b/include/vsg/vk/DeviceMemory.h @@ -13,6 +13,7 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI */ #include +#include #include #include diff --git a/src/vsg/CMakeLists.txt b/src/vsg/CMakeLists.txt index edb217ee9..287b8b445 100644 --- a/src/vsg/CMakeLists.txt +++ b/src/vsg/CMakeLists.txt @@ -11,6 +11,7 @@ endif() set(SOURCES core/Allocator.cpp + core/IntrusiveAllocator.cpp core/Auxiliary.cpp core/ConstVisitor.cpp core/Data.cpp diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index 00e4e5637..7c8ea9805 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -10,7 +10,7 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI */ -#include +#include #include #include #include @@ -41,1022 +41,3 @@ void vsg::deallocate(void* ptr, std::size_t size) { Allocator::instance()->deallocate(ptr, size); } - -//////////////////////////////////////////////////////////////////////////////////////////////////// -// -// vsg::InstrusiveAllocator -// - -#define DEBUG_ALLOCATOR 0 - -////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// -// MemoryBlock -// -IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t in_blockSize, size_t in_alignment) : - name(in_name), - alignment(in_alignment), - blockSize(in_blockSize) -{ - alignment = std::max(alignment, sizeof(Element)); // we need to be a multiple of sizeof(value_type) - elementAlignment = 
static_cast(alignment / sizeof(Element)); - - blockAlignment = std::max(alignment, alignof(std::max_align_t)); - blockAlignment = std::max(blockAlignment, size_t{16}); - - // round blockSize up to nearest aligned size - blockSize = ((blockSize + alignment - 1) / alignment) * alignment; - - memory = static_cast(operator new (blockSize, std::align_val_t{blockAlignment})); - memoryEnd = memory + blockSize / sizeof(Element); - capacity = static_cast(blockSize / alignment); - firstSlot = static_cast(((1 + elementAlignment) / elementAlignment) * elementAlignment - 1); - - Element::Index max_slot_size = (1 << 15); - - // // vsg::debug(" capacity = ", capacity, ", max_slot_size = ", max_slot_size); - - - // set up the free tracking to encompass the whole buffer - // start at element before the first aligned element so that position 0 can be used to mark beginning or end of free lists - freeLists.emplace_back(); - FreeList& freeList = freeLists.front(); - freeList.count = 0; - freeList.head = firstSlot; - maximumAllocationSize = computeMaxiumAllocationSize(blockSize, alignment); - - // mark the first element as 0. - memory[0].index = 0; - - Element::Index previous_position = 0; // 0 marks the beginning of the free list - Element::Index position = firstSlot; - for (; position < capacity;) - { - Element::Index aligned_start = ((position + max_slot_size) / elementAlignment) * elementAlignment; - Element::Index next_position = std::min(aligned_start - 1, capacity); - - memory[position] = Element{(previous_position == 0) ? 0 : (position - previous_position), next_position - position, 1}; - memory[position + 1].index = static_cast(previous_position); - memory[position + 2].index = static_cast((next_position < capacity) ? next_position : 0); - previous_position = position; - position = next_position; - ++freeList.count; - } - -#if DEBUG_ALLOCATOR - std::cout << "IntrusiveAllocator::MemoryBlock::MemoryBlock(" << in_blockSize << ", " << in_alignment << ")" << std::endl; - - std::cout << "blockSize = " << blockSize << std::endl; - std::cout << "capacity = " << capacity << std::endl; - std::cout << "totalReservedSize = " << totalReservedSize() << std::endl; - std::cout << "totalAvailableSize = " << totalAvailableSize() << std::endl; - std::cout << "alignment = " << alignment << std::endl; - std::cout << "elementAlignment = " << elementAlignment << std::endl; - std::cout << "freeList.head = " << freeList.head << std::endl; - - report(std::cout); -#endif -} - -IntrusiveAllocator::MemoryBlock::~MemoryBlock() -{ - operator delete (memory, std::align_val_t{blockAlignment}); -} - -bool IntrusiveAllocator::MemoryBlock::freeSlotsAvaible(size_t size) const -{ - if (size > maximumAllocationSize) return false; - - for (auto& freeList : freeLists) - { - if (freeList.count > 0) return true; - } - return false; -} - -void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) -{ -#if DEBUG_ALLOCATOR - if (!validate()) std::cout << "ERROR detected before IntrusiveAllocator::MemoryBlock::allocate(" << size << ") " << this << std::endl; -#endif - - // check if maximumAllocationSize is big enough - if (size > maximumAllocationSize) return nullptr; - - const size_t minimumNumElementsInSlot = 3; - - for (auto& freeList : freeLists) - { - // check if freeList has available slots - if (freeList.count == 0) continue; - - Element::Index freePosition = freeList.head; - while (freePosition != 0) - { - auto& slot = memory[freePosition]; - if (slot.status != 1) - { - throw "Warning: allocated slot found in freeList"; - } - - 
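The free list being initialised above is intrusive: each slot header is a single 4-byte Element whose bitfields hold the offsets to the previous and next slots plus a status flag, and for a free slot the two Elements that follow the header hold the previous-free and next-free indices, with index 0 terminating the list. A minimal sketch of that layout and of walking the list (it mirrors the fields for illustration rather than reusing the class):

    #include <cstddef>
    #include <cstdint>

    // mirrors the block's 4-byte Element: doubly-linked offsets plus a 2-bit status
    union Element
    {
        std::uint32_t index;
        struct
        {
            unsigned int previous : 15;
            unsigned int next : 15;
            unsigned int status : 2;
        };
    };
    static_assert(sizeof(Element) == 4, "Element is expected to pack into 4 bytes");

    // walk the free list: header at memory[pos], previous-free index at memory[pos + 1].index,
    // next-free index at memory[pos + 2].index, 0 terminates
    inline std::size_t countFreeSlots(const Element* memory, std::size_t head)
    {
        std::size_t count = 0;
        for (std::size_t position = head; position != 0; position = memory[position + 2].index)
        {
            ++count;
        }
        return count;
    }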
Element::Index previousFreePosition = memory[freePosition + 1].index; - Element::Index nextFreePosition = memory[freePosition + 2].index; - - size_t slotSpace = static_cast(slot.next); - if (slot.next == 0) - { - std::cerr << "Warn: IntrusiveAllocator::MemoryBlock::allocate(" << size << ") slot = { " << static_cast(slot.previous) << ", " << static_cast(slot.next) << ", " << static_cast(slot.status) << " }" << std::endl; - } - - Element::Index nextPosition = freePosition + static_cast(slotSpace); - size_t slotSize = sizeof(Element) * (slotSpace - 1); - - if (size <= slotSize) - { - // we can us slot for memory; - - size_t numElementsToBeUsed = std::max((size + sizeof(Element) - 1) / sizeof(Element), minimumNumElementsInSlot); - Element::Index nextAlignedStart = static_cast(((freePosition + 1 + numElementsToBeUsed + elementAlignment) / elementAlignment) * elementAlignment); - Element::Index minimumAlignedEnd = nextAlignedStart + static_cast(minimumNumElementsInSlot); -#if DEBUG_ALLOCATOR - std::cout << "allocating, size = " << size << ", numElementsToBeUsed = " << numElementsToBeUsed << ", freePosition = " << freePosition << ", nextPosition = " << nextPosition << ", nextAlignedStart = " << nextAlignedStart << ", minimumAlignedEnd = " << minimumAlignedEnd << std::endl; -#endif - if (minimumAlignedEnd < nextPosition) - { - - // enough space in slot to split, so adjust - Element::Index newSlotPosition = nextAlignedStart - 1; - slot.next = static_cast(newSlotPosition - freePosition); - -#if DEBUG_ALLOCATOR - std::cout << "splitting slot newSlotPosition = " << newSlotPosition << std::endl; -#endif - // set up the new slot as a free slot - auto& newSlot = memory[newSlotPosition] = Element(slot.next, static_cast(nextPosition - newSlotPosition), 1); - memory[newSlotPosition + 1].index = previousFreePosition; - memory[newSlotPosition + 2].index = nextFreePosition; - - if (previousFreePosition != 0) - { - // need to update the previous slot in the free list - memory[previousFreePosition + 2].index = newSlotPosition; // set previous free slots next index to the newly created slot - } - - if (nextFreePosition != 0) - { - // need to update the previous slot in the free list - memory[nextFreePosition + 1].index = newSlotPosition; // set next free slots previous index to the newly created slot - } - - if (nextPosition < capacity) - { - auto& nextSlot = memory[nextPosition]; - nextSlot.previous = newSlot.next; - } - - if (freePosition == freeList.head) - { - // slot was at head of freeList so move it to the new slot position - freeList.head = newSlotPosition; - } - } - else - { - - // std::cout<<"Removing slot as it's fully used freePosition = "<(slot.status) << std::endl; - else - std::cout << "ERROR detected after IntrusiveAllocator::MemoryBlock::allocate(" << size << ") " << this << " allocated = " << &memory[freePosition + 1] << std::endl; -#endif - - return &memory[freePosition + 1]; - } - - freePosition = nextFreePosition; - } - } - -#if DEBUG_ALLOCATOR - std::cout << "IntrusiveAllocator::MemoryBlock::allocator(" << size << ") " << this << " No space found" << std::endl; -#endif - - return nullptr; -} - -void IntrusiveAllocator::MemoryBlock::SlotTester::slot(size_t position, const std::string& name) -{ - if (mem[position].status == 0) - elements.push_back(Entry{name, position, mem[position], 0, 0}); - else - elements.push_back(Entry{name, position, mem[position], mem[position + 1].index, mem[position + 2].index}); -} - -void IntrusiveAllocator::MemoryBlock::SlotTester::report(std::ostream& out) -{ - 
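When a free slot is found in allocate(..) above, it is either split, leaving a new free slot at its tail, or consumed whole and unlinked from the free list; the deciding test is whether a minimum-sized, correctly aligned slot would still fit behind the allocation. A condensed sketch of that test, with sizes in 4-byte Elements and elementsRequested already rounded up from bytes (the helper names are illustrative):

    #include <algorithm>
    #include <cstddef>

    struct SplitDecision
    {
        bool split;          // true: carve a new free slot out of the tail of this one
        std::size_t newSlot; // header position of the new free slot when splitting
    };

    inline SplitDecision decide(std::size_t freePosition, std::size_t nextPosition,
                                std::size_t elementsRequested, std::size_t elementAlignment)
    {
        const std::size_t minimumNumElementsInSlot = 3; // header plus the two free-list links
        std::size_t numElementsToBeUsed = std::max(elementsRequested, minimumNumElementsInSlot);
        std::size_t nextAlignedStart =
            ((freePosition + 1 + numElementsToBeUsed + elementAlignment) / elementAlignment) * elementAlignment;
        std::size_t minimumAlignedEnd = nextAlignedStart + minimumNumElementsInSlot;

        if (minimumAlignedEnd < nextPosition) return {true, nextAlignedStart - 1};
        return {false, 0}; // not enough room to split: hand out the whole slot and unlink it
    }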
out << "head = " << head << std::endl; - for (auto& entry : elements) - { - out << " " << entry.name << ", pos = " << entry.position << " slot { " << entry.slot.previous << ", " << entry.slot.next << ", " << static_cast(entry.slot.status) << " } "; - if (entry.slot.status != 0) - out << " previous free = " << entry.previousFree << ", next free = " << entry.nextFree << std::endl; - else - out << std::endl; - } -} - -bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/) -{ - if (within(ptr)) - { - auto& freeList = freeLists.front(); - size_t maxSize = 1 + maximumAllocationSize / sizeof(Element); - - // - // sequential slots around the slot to be deallocated are named: - // PP (Previous' Previous), P (Previous), C (Current slot being deallocated), N (Next), NN (Next's Next) - // - // the FreeList linked list entries of interest are named: - // PPF (Previous' Previous Free), PNF (Previous's Next Free), NPF (Next's Previous Free), NNF (Next's Next Free) - // - - Element::Index C = static_cast(static_cast(ptr) - memory) - 1; - auto& slot = memory[C]; - -#if DEBUG_ALLOCATOR - if (validate()) - { - std::cout << "IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ", " << size << ") " << this << " C = " << C << ", slot = {" << slot.previous << ", " << slot.next << ", " << static_cast(slot.status) << std::endl; - } - else - { - std::cout << "ERROR detected befpre IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ", " << size << ") " << this << std::endl; - } -#endif - - if (slot.next == 0) - { - std::cerr << "Warn: IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ") C = " << C << ", slot = { " << slot.previous << ", " << slot.next << ", " << slot.status << " }" << std::endl; - throw "slot.ext == 0"; - } - - if (slot.status != 0) - { - std::cerr << "Warn: IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ") C = " << C << ", Attempt to deallocate already available slot : slot = { " << slot.previous << ", " << slot.next << ", " << slot.status << " }" << std::endl; - throw "Attempt to deallocate already available slot"; - } - - // set up the indices for the previous and next slots - Element::Index P = (slot.previous > 0) ? (C - static_cast(slot.previous)) : 0; - Element::Index N = C + static_cast(slot.next); - if (N >= capacity) N = 0; - - // set up the indices for the previous free entry - Element::Index PPF = 0; - Element::Index PNF = 0; - if (P != 0) - { - if (memory[P].status != 0) - { - PPF = memory[P + 1].index; - PNF = memory[P + 2].index; - } - } - - // set up the indices for the next free entries - Element::Index NN = 0; - Element::Index NPF = 0; - Element::Index NNF = 0; - if (N != 0) - { - NN = N + static_cast(memory[N].next); - if (NN >= capacity) NN = 0; - - if (memory[N].status != 0) - { - NPF = memory[N + 1].index; - NNF = memory[N + 2].index; - } - } - - // 3 way merge of P, C and C - auto mergePCN = [&]() -> void { -#if DEBUG_ALLOCATOR - SlotTester before(memory, freeList.head); - before.slot(P, "P"); - before.slot(C, "C"); - before.slot(N, "N"); - before.slot(PPF, "PPF"); - before.slot(PNF, "PNF"); - before.slot(NPF, "NPF"); - before.slot(NNF, "NNF"); -#endif - // update slots for the merge - memory[P].next += memory[C].next + memory[N].next; - if (NN != 0) memory[NN].previous = memory[P].next; - - // update freeList linked list entries - if (PNF == N) // also implies NPF == P - { - // case 1. in order sequential -#if DEBUG_ALLOCATOR - std::cout << " case 1. 
in order sequential" << std::endl; -#endif - memory[P + 2].index = NNF; - if (NNF != 0) memory[NNF + 1].index = P; - } - else if (PPF == N) // also implies NNF == P - { - // case 2. reverse sequential -#if DEBUG_ALLOCATOR - std::cout << " case 2. reverse sequential" << std::endl; -#endif - if (freeList.head == N) - { - freeList.head = P; - memory[P + 1] = 0; - } - else - { - memory[P + 1].index = NPF; - if (NPF != 0) memory[NPF + 2] = P; - } - } - else // P and N aren't directly connected within the freeList - { - // case 3. disconnected -#if DEBUG_ALLOCATOR - std::cout << " case 3. disconnected" << std::endl; -#endif - if (NPF != 0) memory[NPF + 2].index = NNF; - if (NNF != 0) memory[NNF + 1].index = NPF; - - if (freeList.head == N) - { - freeList.head = NNF; - } - } - - // N slot is nolonger a seperate free slot so decrement free count - --freeList.count; - -#if DEBUG_ALLOCATOR - if (!validate()) - { - SlotTester after(memory, freeList.head); - after.slot(P, "P"); - after.slot(PPF, "PPF"); - after.slot(PNF, "PNF"); - after.slot(NPF, "NPF"); - after.slot(NNF, "NNF"); - - std::cout << "ERROR detected after mergePCN() IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ", " << size << ") " << this << std::endl; - - std::cout << "Before: "; - before.report(std::cout); - std::cout << "After: "; - after.report(std::cout); - } -#endif - }; - - // 2 way merge of P and C - auto mergePC = [&]() -> void { - // update slots for the merge - memory[P].next += memory[C].next; - if (N != 0) memory[N].previous = memory[P].next; - - // freeList linked list entries will not need updating. - -#if DEBUG_ALLOCATOR - if (!validate()) - { - std::cout << "ERROR detected after mergePC() IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ", " << size << ") " << this << std::endl; - } -#endif - }; - - // 2 way merge of C and N - auto mergeCN = [&]() -> void { - // update slots for merge - memory[C].status = 1; - memory[C].next += memory[N].next; - if (NN != 0) memory[NN].previous = memory[C].next; - - // update freeList linked list entries - if (NPF != 0) memory[NPF + 2].index = C; - if (NNF != 0) memory[NNF + 1].index = C; - memory[C + 1].index = NPF; - memory[C + 2].index = NNF; - - // if N was the head then change head to C - if (freeList.head == N) freeList.head = C; - -#if DEBUG_ALLOCATOR - if (!validate()) - { - std::cout << "ERROR detected after mergeCN() IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ", " << size << ") " << this << std::endl; - } -#endif - }; - - // standalone insertion of C into head of freeList - auto standalone = [&]() -> void { - memory[C].status = 1; - memory[C + 1].index = 0; - memory[C + 2].index = freeList.head; - - if (freeList.head != 0) - { - memory[freeList.head + 1] = C; // set previous heads previousFree to C. - } - - // set the head to C. 
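mergePCN(), mergePC(), mergeCN() and standalone() above cover the four ways a freed slot C can be coalesced with its neighbours P and N; which one runs depends on whether each neighbour is free and whether the combined span stays within the block's maximum slot size (maxSize, bounded by the 15-bit 'next' offset). A condensed sketch of that selection, with sizes measured in Elements:

    #include <cstddef>

    enum class Merge { PCN, PC, CN, Standalone };

    inline Merge chooseMerge(bool previousFree, bool nextFree,
                             std::size_t sizeP, std::size_t sizeC, std::size_t sizeN,
                             std::size_t maxSize)
    {
        if (previousFree && nextFree)
        {
            if (sizeP + sizeC + sizeN <= maxSize) return Merge::PCN;
            if (sizeP + sizeC <= maxSize) return Merge::PC;
            if (sizeC + sizeN <= maxSize) return Merge::CN;
            return Merge::Standalone;
        }
        if (previousFree) return (sizeP + sizeC <= maxSize) ? Merge::PC : Merge::Standalone;
        if (nextFree) return (sizeC + sizeN <= maxSize) ? Merge::CN : Merge::Standalone;
        return Merge::Standalone;
    }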
- freeList.head = C; - - // Inserted new free slot so increment free count - ++freeList.count; - -#if DEBUG_ALLOCATOR - if (!validate()) - { - std::cout << "ERROR detected after standalone() IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ", " << size << ") " << this << " C = " << C << ", memory[C + 2].index = " << memory[C + 2].index << std::endl; - } -#endif - }; - - if (P != 0 && memory[P].status != 0) - { - if (N != 0 && memory[N].status != 0) - { - if ((static_cast(memory[P].next) + static_cast(memory[C].next) + static_cast(memory[N].next)) <= maxSize) - mergePCN(); - else if ((static_cast(memory[P].next) + static_cast(memory[C].next)) <= maxSize) - mergePC(); // merge P and C - else if ((static_cast(memory[C].next) + static_cast(memory[N].next)) <= maxSize) - mergeCN(); // merge C and N - else - standalone(); // C is standalone - } - else if ((static_cast(memory[P].next) + static_cast(memory[C].next)) <= maxSize) - mergePC(); // merge P and C - else - standalone(); // C is standalone - } - else if (N != 0 && memory[N].status != 0) - { - if (static_cast(memory[C].next) + static_cast(memory[N].next) <= maxSize) - mergeCN(); // merge C and N - else - standalone(); // standalone - } - else - { - // C is standalone - standalone(); - } - - return true; - } - -#if DEBUG_ALLOCATOR - std::cout << "IntrusiveAllocator::MemoryBlock::deallocate((" << ptr << ", " << size << ") OUTWITH block : " << this << std::endl; -#endif - - return false; -} - -void IntrusiveAllocator::MemoryBlock::report(std::ostream& out) const -{ - out << "MemoryBlock " << this << " " << name << std::endl; - out << " alignment = " << alignment << std::endl; - out << " blockAlignment = " << blockAlignment << std::endl; - out << " blockSize = " << blockSize << ", memory = " << static_cast(memory) << std::endl; - out << " maximumAllocationSize = " << maximumAllocationSize << std::endl; - out << " firstSlot = "<< firstSlot< allocated; - std::set available; - - while (position < capacity) - { - auto& slot = memory[position]; - if (slot.previous > capacity || slot.next > capacity) - { - std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " slot.corrupted invalid position = " << position << ", slot = {" << slot.previous << ", " << slot.next << ", " << int(slot.status) << "}" << std::endl; - return false; - } - - if (slot.status == 0) - allocated.insert(position); - else - available.insert(position); - - if (slot.previous != 0) - { - if (slot.previous > position) - { - std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " slot.previous invalid position = " << position << ", slot = {" << slot.previous << ", " << slot.next << ", " << int(slot.status) << "}" << std::endl; - return false; - } - size_t previous_position = position - slot.previous; - if (previous_position != previous) - { - std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " validation failed : previous slot = " << previous << " doesn't match slot.previous, position = " << position << ", slot = {" << slot.previous << ", " << slot.next << ", " << int(slot.status) << "}" << std::endl; - return false; - } - - size_t previousFree = memory[position + 1].index; - size_t nextFree = memory[position + 2].index; - if (previousFree == position || nextFree == position) - { - std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " validation failed : slot's previous/nextFree points back to itself, position = " << position << ", slot = {" << slot.previous << ", " << slot.next << ", " << int(slot.status) 
<< "} previousFree = " << previousFree << ", nextFree = " << nextFree << std::endl; - return false; - } - } - - if (slot.next == 0) - { - std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " validation failed: position = " << position << " slot = {" << slot.previous << ", " << slot.next << ", " << static_cast(slot.status) << "}" << std::endl; - return false; - } - - previous = position; - position += slot.next; - } - - std::set inFreeList; - - // std::cout<<"No invalid entries found"<(slot.status) << "}" << std::endl; - return false; - } - - if (memory[freePosition + 1].index != previousPosition || memory[freePosition + 1].index == freePosition) - { - std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " validation failed, free list inconsisntent, head = " << freeList.head << ", previousPosition = " << previousPosition << ", freePosition = " << freePosition << ", slot = {" << slot.previous << ", " << slot.next << ", " << static_cast(slot.status) << "} previousFree = " << memory[freePosition + 1].index << ", nextFree = " << memory[freePosition + 2].index << std::endl; - return false; - } - - previousPosition = freePosition; - freePosition = memory[freePosition + 2].index; - } - } - - if (available.size() != inFreeList.size()) - { - std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " validation failed, Different number of entries in available and in freeList: available.size() = " << available.size() << ", inFreeList.size() = " << inFreeList.size() << std::endl; - return false; - } - - return true; -} - -size_t IntrusiveAllocator::MemoryBlock::totalAvailableSize() const -{ - size_t count = 0; - size_t position = firstSlot; - while (position < capacity) - { - auto& slot = memory[position]; - position += slot.next; - if (slot.status != 0) count += slot.next - 1; - } - - return count * sizeof(Element); -} - -size_t IntrusiveAllocator::MemoryBlock::totalReservedSize() const -{ - size_t count = 0; - size_t position = firstSlot; - while (position < capacity) - { - auto& slot = memory[position]; - position += slot.next; - if (slot.status == 0) count += slot.next - 1; - } - - return count * sizeof(Element); -} - -size_t IntrusiveAllocator::MemoryBlock::totalMemorySize() const -{ - size_t count = 0; - size_t position = firstSlot; - while (position < capacity) - { - auto& slot = memory[position]; - position += slot.next; - count += slot.next - 1; - } - - return count * sizeof(Element); -} - -////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// -// MemoryBlocks -// -IntrusiveAllocator::MemoryBlocks::MemoryBlocks(IntrusiveAllocator* in_parent, const std::string& in_name, size_t in_blockSize, size_t in_alignment) : - parent(in_parent), - name(in_name), - alignment(in_alignment), - blockSize(in_blockSize), - maximumAllocationSize(IntrusiveAllocator::MemoryBlock::computeMaxiumAllocationSize(in_blockSize, in_alignment)) -{ -} - -IntrusiveAllocator::MemoryBlocks::~MemoryBlocks() -{ -} - -void* IntrusiveAllocator::MemoryBlocks::allocate(std::size_t size) -{ - if (memoryBlockWithSpace) - { - auto ptr = memoryBlockWithSpace->allocate(size); - if (ptr) return ptr; - } - - size_t new_blockSize = std::max(size, blockSize); - for (auto& block : memoryBlocks) - { - if (block != memoryBlockWithSpace) - { - auto ptr = block->allocate(size); - if (ptr) return ptr; - } - } - - auto new_block = std::make_shared(name, new_blockSize, alignment); - if (parent) - { - parent->memoryBlocks[new_block->memory] 
= new_block; - } - - if (memoryBlocks.empty()) - { - maximumAllocationSize = new_block->maximumAllocationSize; - } - - memoryBlockWithSpace = new_block; - memoryBlocks.push_back(new_block); - - auto ptr = new_block->allocate(size); - - return ptr; -} - -void IntrusiveAllocator::MemoryBlocks::report(std::ostream& out) const -{ - out << "IntrusiveAllocator::MemoryBlocks::report() memoryBlocks.size() = " << memoryBlocks.size() << std::endl; - for (auto& memoryBlock : memoryBlocks) - { - memoryBlock->report(out); - } -} - -bool IntrusiveAllocator::MemoryBlocks::validate() const -{ - bool valid = true; - for (auto& memoryBlock : memoryBlocks) - { - valid = memoryBlock->validate() && valid; - } - return valid; -} - -size_t IntrusiveAllocator::MemoryBlocks::deleteEmptyMemoryBlocks() -{ - size_t count = 0; - decltype(memoryBlocks) remainingBlocks; - for(auto& memoryBlock : memoryBlocks) - { - if (memoryBlock->totalReservedSize() == 0) - { - count += memoryBlock->totalAvailableSize(); - } - else - { - remainingBlocks.push_back(memoryBlock); - } - } - memoryBlocks.swap(remainingBlocks); - - return count; -} - -size_t IntrusiveAllocator::MemoryBlocks::totalAvailableSize() const -{ - size_t count = 0; - for(auto& memoryBlock : memoryBlocks) - { - count += memoryBlock->totalAvailableSize(); - } - return count; -} - -size_t IntrusiveAllocator::MemoryBlocks::totalReservedSize() const -{ - size_t count = 0; - for(auto& memoryBlock : memoryBlocks) - { - count += memoryBlock->totalReservedSize(); - } - return count; -} - -size_t IntrusiveAllocator::MemoryBlocks::totalMemorySize() const -{ - size_t count = 0; - for(auto& memoryBlock : memoryBlocks) - { - count += memoryBlock->totalMemorySize(); - } - return count; -} - -////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// -// IntrusiveAllocator -// -IntrusiveAllocator::IntrusiveAllocator(size_t in_defaultAlignment) : - Allocator(in_defaultAlignment) -{ - size_t Megabyte = size_t(1024) * size_t(1024); - size_t blockSize = size_t(1) * Megabyte; - - allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, defaultAlignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, defaultAlignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, defaultAlignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", blockSize, 16)); -} - -IntrusiveAllocator::IntrusiveAllocator(std::unique_ptr in_nestedAllocator, size_t in_defaultAlignment) : - Allocator(std::move(in_nestedAllocator), in_defaultAlignment) -{ - size_t Megabyte = size_t(1024) * size_t(1024); - size_t blockSize = size_t(1) * Megabyte; - - allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, defaultAlignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, defaultAlignment)); - allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, defaultAlignment)); - 
allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", blockSize, 16)); -} - -IntrusiveAllocator::~IntrusiveAllocator() -{ -} - -void IntrusiveAllocator::setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) -{ - std::scoped_lock lock(mutex); - - if (size_t(allocatorAffinity) < allocatorMemoryBlocks.size()) - { - allocatorMemoryBlocks[allocatorAffinity]->blockSize = blockSize; - } - else - { - auto name = vsg::make_string("MemoryBlocks_", allocatorAffinity); - - allocatorMemoryBlocks.resize(allocatorAffinity + 1); - allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, name, blockSize, defaultAlignment)); - } -} - -void IntrusiveAllocator::report(std::ostream& out) const -{ - out << "IntrusiveAllocator::report() " << allocatorMemoryBlocks.size() << std::endl; - - for (const auto& memoryBlock : allocatorMemoryBlocks) - { - if (memoryBlock) memoryBlock->report(out); - } - - validate(); -} - -void* IntrusiveAllocator::allocate(std::size_t size, AllocatorAffinity allocatorAffinity) -{ - std::scoped_lock lock(mutex); - - // create a MemoryBlocks entry if one doesn't already exist - if (allocatorAffinity > allocatorMemoryBlocks.size()) - { - size_t blockSize = 1024 * 1024; // Megabyte - allocatorMemoryBlocks.resize(allocatorAffinity + 1); - allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, "MemoryBlockAffinity", blockSize, defaultAlignment)); - } - - void* ptr = nullptr; - - auto& blocks = allocatorMemoryBlocks[allocatorAffinity]; - if (blocks) - { - if (size <= blocks->maximumAllocationSize) - { - ptr = blocks->allocate(size); - if (ptr) return ptr; - //std::cout<<"IntrusiveAllocator::allocate() Failed to allocator memory from memoryBlocks "<alignment}); - if (ptr) largeAllocations[ptr] = std::pair(blocks->alignment, size); - //std::cout<<"IntrusiveAllocator::allocate() MemoryBlocks aligned large allocation = "<maximumAllocationSize = "<maximumAllocationSize<(defaultAlignment, size); - //std::cout<<"IntrusiveAllocator::allocate() default aligned large allocation = "<second.first}); - largeAllocations.erase(la_itr); - return true; - } - - if (nestedAllocator && nestedAllocator->deallocate(ptr, size)) - { - return true; - } - - return false; -} - -bool IntrusiveAllocator::validate() const -{ - bool valid = true; - for (auto& memoryBlock : allocatorMemoryBlocks) - { - valid = memoryBlock->validate() && valid; - } - return valid; -} - -size_t IntrusiveAllocator::deleteEmptyMemoryBlocks() -{ - size_t count = 0; - for(auto& blocks : allocatorMemoryBlocks) - { - count += blocks->deleteEmptyMemoryBlocks(); - } - return count; -} - -size_t IntrusiveAllocator::totalAvailableSize() const -{ - size_t count = 0; - for(auto& blocks : allocatorMemoryBlocks) - { - count += blocks->totalAvailableSize(); - } - return count; -} - -size_t IntrusiveAllocator::totalReservedSize() const -{ - size_t count = 0; - for(auto& blocks : allocatorMemoryBlocks) - { - count += blocks->totalReservedSize(); - } - return count; -} - -size_t IntrusiveAllocator::totalMemorySize() const -{ - size_t count = 0; - for(auto& blocks : allocatorMemoryBlocks) - { - count += blocks->totalMemorySize(); - } - return count; -} diff --git a/src/vsg/core/IntrusiveAllocator.cpp b/src/vsg/core/IntrusiveAllocator.cpp new file mode 100644 index 000000000..7529d4066 --- /dev/null +++ b/src/vsg/core/IntrusiveAllocator.cpp @@ -0,0 +1,1042 @@ +/* + +Copyright(c) 2024 Robert Osfield + +Permission is hereby granted, free of charge, to any person 
obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + */ + +#include +#include +#include +#include +#include + +#include +#include +#include + +using namespace vsg; + +//////////////////////////////////////////////////////////////////////////////////////////////////// +// +// vsg::InstrusiveAllocator +// + +#define DEBUG_ALLOCATOR 0 + +////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// +// MemoryBlock +// +IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t in_blockSize, size_t in_alignment) : + name(in_name), + alignment(in_alignment), + blockSize(in_blockSize) +{ + alignment = std::max(alignment, sizeof(Element)); // we need to be a multiple of sizeof(value_type) + elementAlignment = static_cast(alignment / sizeof(Element)); + + blockAlignment = std::max(alignment, alignof(std::max_align_t)); + blockAlignment = std::max(blockAlignment, size_t{16}); + + // round blockSize up to nearest aligned size + blockSize = ((blockSize + alignment - 1) / alignment) * alignment; + + memory = static_cast(operator new (blockSize, std::align_val_t{blockAlignment})); + memoryEnd = memory + blockSize / sizeof(Element); + capacity = static_cast(blockSize / alignment); + firstSlot = static_cast(((1 + elementAlignment) / elementAlignment) * elementAlignment - 1); + + Element::Index max_slot_size = (1 << 15); + + // // vsg::debug(" capacity = ", capacity, ", max_slot_size = ", max_slot_size); + + + // set up the free tracking to encompass the whole buffer + // start at element before the first aligned element so that position 0 can be used to mark beginning or end of free lists + freeLists.emplace_back(); + FreeList& freeList = freeLists.front(); + freeList.count = 0; + freeList.head = firstSlot; + maximumAllocationSize = computeMaxiumAllocationSize(blockSize, alignment); + + // mark the first element as 0. + memory[0].index = 0; + + Element::Index previous_position = 0; // 0 marks the beginning of the free list + Element::Index position = firstSlot; + for (; position < capacity;) + { + Element::Index aligned_start = ((position + max_slot_size) / elementAlignment) * elementAlignment; + Element::Index next_position = std::min(aligned_start - 1, capacity); + + memory[position] = Element{(previous_position == 0) ? 0 : (position - previous_position), next_position - position, 1}; + memory[position + 1].index = static_cast(previous_position); + memory[position + 2].index = static_cast((next_position < capacity) ? 
next_position : 0); + previous_position = position; + position = next_position; + ++freeList.count; + } + +#if DEBUG_ALLOCATOR + std::cout << "IntrusiveAllocator::MemoryBlock::MemoryBlock(" << in_blockSize << ", " << in_alignment << ")" << std::endl; + + std::cout << "blockSize = " << blockSize << std::endl; + std::cout << "capacity = " << capacity << std::endl; + std::cout << "totalReservedSize = " << totalReservedSize() << std::endl; + std::cout << "totalAvailableSize = " << totalAvailableSize() << std::endl; + std::cout << "alignment = " << alignment << std::endl; + std::cout << "elementAlignment = " << elementAlignment << std::endl; + std::cout << "freeList.head = " << freeList.head << std::endl; + + report(std::cout); +#endif +} + +IntrusiveAllocator::MemoryBlock::~MemoryBlock() +{ + operator delete (memory, std::align_val_t{blockAlignment}); +} + +bool IntrusiveAllocator::MemoryBlock::freeSlotsAvaible(size_t size) const +{ + if (size > maximumAllocationSize) return false; + + for (auto& freeList : freeLists) + { + if (freeList.count > 0) return true; + } + return false; +} + +void* IntrusiveAllocator::MemoryBlock::allocate(std::size_t size) +{ +#if DEBUG_ALLOCATOR + if (!validate()) std::cout << "ERROR detected before IntrusiveAllocator::MemoryBlock::allocate(" << size << ") " << this << std::endl; +#endif + + // check if maximumAllocationSize is big enough + if (size > maximumAllocationSize) return nullptr; + + const size_t minimumNumElementsInSlot = 3; + + for (auto& freeList : freeLists) + { + // check if freeList has available slots + if (freeList.count == 0) continue; + + Element::Index freePosition = freeList.head; + while (freePosition != 0) + { + auto& slot = memory[freePosition]; + if (slot.status != 1) + { + throw "Warning: allocated slot found in freeList"; + } + + Element::Index previousFreePosition = memory[freePosition + 1].index; + Element::Index nextFreePosition = memory[freePosition + 2].index; + + size_t slotSpace = static_cast(slot.next); + if (slot.next == 0) + { + std::cerr << "Warn: IntrusiveAllocator::MemoryBlock::allocate(" << size << ") slot = { " << static_cast(slot.previous) << ", " << static_cast(slot.next) << ", " << static_cast(slot.status) << " }" << std::endl; + } + + Element::Index nextPosition = freePosition + static_cast(slotSpace); + size_t slotSize = sizeof(Element) * (slotSpace - 1); + + if (size <= slotSize) + { + // we can us slot for memory; + + size_t numElementsToBeUsed = std::max((size + sizeof(Element) - 1) / sizeof(Element), minimumNumElementsInSlot); + Element::Index nextAlignedStart = static_cast(((freePosition + 1 + numElementsToBeUsed + elementAlignment) / elementAlignment) * elementAlignment); + Element::Index minimumAlignedEnd = nextAlignedStart + static_cast(minimumNumElementsInSlot); +#if DEBUG_ALLOCATOR + std::cout << "allocating, size = " << size << ", numElementsToBeUsed = " << numElementsToBeUsed << ", freePosition = " << freePosition << ", nextPosition = " << nextPosition << ", nextAlignedStart = " << nextAlignedStart << ", minimumAlignedEnd = " << minimumAlignedEnd << std::endl; +#endif + if (minimumAlignedEnd < nextPosition) + { + + // enough space in slot to split, so adjust + Element::Index newSlotPosition = nextAlignedStart - 1; + slot.next = static_cast(newSlotPosition - freePosition); + +#if DEBUG_ALLOCATOR + std::cout << "splitting slot newSlotPosition = " << newSlotPosition << std::endl; +#endif + // set up the new slot as a free slot + auto& newSlot = memory[newSlotPosition] = Element(slot.next, 
static_cast(nextPosition - newSlotPosition), 1); + memory[newSlotPosition + 1].index = previousFreePosition; + memory[newSlotPosition + 2].index = nextFreePosition; + + if (previousFreePosition != 0) + { + // need to update the previous slot in the free list + memory[previousFreePosition + 2].index = newSlotPosition; // set previous free slots next index to the newly created slot + } + + if (nextFreePosition != 0) + { + // need to update the previous slot in the free list + memory[nextFreePosition + 1].index = newSlotPosition; // set next free slots previous index to the newly created slot + } + + if (nextPosition < capacity) + { + auto& nextSlot = memory[nextPosition]; + nextSlot.previous = newSlot.next; + } + + if (freePosition == freeList.head) + { + // slot was at head of freeList so move it to the new slot position + freeList.head = newSlotPosition; + } + } + else + { + + // std::cout<<"Removing slot as it's fully used freePosition = "<(slot.status) << std::endl; + else + std::cout << "ERROR detected after IntrusiveAllocator::MemoryBlock::allocate(" << size << ") " << this << " allocated = " << &memory[freePosition + 1] << std::endl; +#endif + + return &memory[freePosition + 1]; + } + + freePosition = nextFreePosition; + } + } + +#if DEBUG_ALLOCATOR + std::cout << "IntrusiveAllocator::MemoryBlock::allocator(" << size << ") " << this << " No space found" << std::endl; +#endif + + return nullptr; +} + +void IntrusiveAllocator::MemoryBlock::SlotTester::slot(size_t position, const std::string& name) +{ + if (mem[position].status == 0) + elements.push_back(Entry{name, position, mem[position], 0, 0}); + else + elements.push_back(Entry{name, position, mem[position], mem[position + 1].index, mem[position + 2].index}); +} + +void IntrusiveAllocator::MemoryBlock::SlotTester::report(std::ostream& out) +{ + out << "head = " << head << std::endl; + for (auto& entry : elements) + { + out << " " << entry.name << ", pos = " << entry.position << " slot { " << entry.slot.previous << ", " << entry.slot.next << ", " << static_cast(entry.slot.status) << " } "; + if (entry.slot.status != 0) + out << " previous free = " << entry.previousFree << ", next free = " << entry.nextFree << std::endl; + else + out << std::endl; + } +} + +bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/) +{ + if (within(ptr)) + { + auto& freeList = freeLists.front(); + size_t maxSize = 1 + maximumAllocationSize / sizeof(Element); + + // + // sequential slots around the slot to be deallocated are named: + // PP (Previous' Previous), P (Previous), C (Current slot being deallocated), N (Next), NN (Next's Next) + // + // the FreeList linked list entries of interest are named: + // PPF (Previous' Previous Free), PNF (Previous's Next Free), NPF (Next's Previous Free), NNF (Next's Next Free) + // + + Element::Index C = static_cast(static_cast(ptr) - memory) - 1; + auto& slot = memory[C]; + +#if DEBUG_ALLOCATOR + if (validate()) + { + std::cout << "IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ", " << size << ") " << this << " C = " << C << ", slot = {" << slot.previous << ", " << slot.next << ", " << static_cast(slot.status) << std::endl; + } + else + { + std::cout << "ERROR detected befpre IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ", " << size << ") " << this << std::endl; + } +#endif + + if (slot.next == 0) + { + std::cerr << "Warn: IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ") C = " << C << ", slot = { " << slot.previous << ", " << slot.next << ", " << slot.status << " 
}" << std::endl; + throw "slot.ext == 0"; + } + + if (slot.status != 0) + { + std::cerr << "Warn: IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ") C = " << C << ", Attempt to deallocate already available slot : slot = { " << slot.previous << ", " << slot.next << ", " << slot.status << " }" << std::endl; + throw "Attempt to deallocate already available slot"; + } + + // set up the indices for the previous and next slots + Element::Index P = (slot.previous > 0) ? (C - static_cast(slot.previous)) : 0; + Element::Index N = C + static_cast(slot.next); + if (N >= capacity) N = 0; + + // set up the indices for the previous free entry + Element::Index PPF = 0; + Element::Index PNF = 0; + if (P != 0) + { + if (memory[P].status != 0) + { + PPF = memory[P + 1].index; + PNF = memory[P + 2].index; + } + } + + // set up the indices for the next free entries + Element::Index NN = 0; + Element::Index NPF = 0; + Element::Index NNF = 0; + if (N != 0) + { + NN = N + static_cast(memory[N].next); + if (NN >= capacity) NN = 0; + + if (memory[N].status != 0) + { + NPF = memory[N + 1].index; + NNF = memory[N + 2].index; + } + } + + // 3 way merge of P, C and C + auto mergePCN = [&]() -> void { +#if DEBUG_ALLOCATOR + SlotTester before(memory, freeList.head); + before.slot(P, "P"); + before.slot(C, "C"); + before.slot(N, "N"); + before.slot(PPF, "PPF"); + before.slot(PNF, "PNF"); + before.slot(NPF, "NPF"); + before.slot(NNF, "NNF"); +#endif + // update slots for the merge + memory[P].next += memory[C].next + memory[N].next; + if (NN != 0) memory[NN].previous = memory[P].next; + + // update freeList linked list entries + if (PNF == N) // also implies NPF == P + { + // case 1. in order sequential +#if DEBUG_ALLOCATOR + std::cout << " case 1. in order sequential" << std::endl; +#endif + memory[P + 2].index = NNF; + if (NNF != 0) memory[NNF + 1].index = P; + } + else if (PPF == N) // also implies NNF == P + { + // case 2. reverse sequential +#if DEBUG_ALLOCATOR + std::cout << " case 2. reverse sequential" << std::endl; +#endif + if (freeList.head == N) + { + freeList.head = P; + memory[P + 1] = 0; + } + else + { + memory[P + 1].index = NPF; + if (NPF != 0) memory[NPF + 2] = P; + } + } + else // P and N aren't directly connected within the freeList + { + // case 3. disconnected +#if DEBUG_ALLOCATOR + std::cout << " case 3. disconnected" << std::endl; +#endif + if (NPF != 0) memory[NPF + 2].index = NNF; + if (NNF != 0) memory[NNF + 1].index = NPF; + + if (freeList.head == N) + { + freeList.head = NNF; + } + } + + // N slot is nolonger a seperate free slot so decrement free count + --freeList.count; + +#if DEBUG_ALLOCATOR + if (!validate()) + { + SlotTester after(memory, freeList.head); + after.slot(P, "P"); + after.slot(PPF, "PPF"); + after.slot(PNF, "PNF"); + after.slot(NPF, "NPF"); + after.slot(NNF, "NNF"); + + std::cout << "ERROR detected after mergePCN() IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ", " << size << ") " << this << std::endl; + + std::cout << "Before: "; + before.report(std::cout); + std::cout << "After: "; + after.report(std::cout); + } +#endif + }; + + // 2 way merge of P and C + auto mergePC = [&]() -> void { + // update slots for the merge + memory[P].next += memory[C].next; + if (N != 0) memory[N].previous = memory[P].next; + + // freeList linked list entries will not need updating. 
+ +#if DEBUG_ALLOCATOR + if (!validate()) + { + std::cout << "ERROR detected after mergePC() IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ", " << size << ") " << this << std::endl; + } +#endif + }; + + // 2 way merge of C and N + auto mergeCN = [&]() -> void { + // update slots for merge + memory[C].status = 1; + memory[C].next += memory[N].next; + if (NN != 0) memory[NN].previous = memory[C].next; + + // update freeList linked list entries + if (NPF != 0) memory[NPF + 2].index = C; + if (NNF != 0) memory[NNF + 1].index = C; + memory[C + 1].index = NPF; + memory[C + 2].index = NNF; + + // if N was the head then change head to C + if (freeList.head == N) freeList.head = C; + +#if DEBUG_ALLOCATOR + if (!validate()) + { + std::cout << "ERROR detected after mergeCN() IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ", " << size << ") " << this << std::endl; + } +#endif + }; + + // standalone insertion of C into head of freeList + auto standalone = [&]() -> void { + memory[C].status = 1; + memory[C + 1].index = 0; + memory[C + 2].index = freeList.head; + + if (freeList.head != 0) + { + memory[freeList.head + 1] = C; // set previous heads previousFree to C. + } + + // set the head to C. + freeList.head = C; + + // Inserted new free slot so increment free count + ++freeList.count; + +#if DEBUG_ALLOCATOR + if (!validate()) + { + std::cout << "ERROR detected after standalone() IntrusiveAllocator::MemoryBlock::deallocate(" << ptr << ", " << size << ") " << this << " C = " << C << ", memory[C + 2].index = " << memory[C + 2].index << std::endl; + } +#endif + }; + + if (P != 0 && memory[P].status != 0) + { + if (N != 0 && memory[N].status != 0) + { + if ((static_cast(memory[P].next) + static_cast(memory[C].next) + static_cast(memory[N].next)) <= maxSize) + mergePCN(); + else if ((static_cast(memory[P].next) + static_cast(memory[C].next)) <= maxSize) + mergePC(); // merge P and C + else if ((static_cast(memory[C].next) + static_cast(memory[N].next)) <= maxSize) + mergeCN(); // merge C and N + else + standalone(); // C is standalone + } + else if ((static_cast(memory[P].next) + static_cast(memory[C].next)) <= maxSize) + mergePC(); // merge P and C + else + standalone(); // C is standalone + } + else if (N != 0 && memory[N].status != 0) + { + if (static_cast(memory[C].next) + static_cast(memory[N].next) <= maxSize) + mergeCN(); // merge C and N + else + standalone(); // standalone + } + else + { + // C is standalone + standalone(); + } + + return true; + } + +#if DEBUG_ALLOCATOR + std::cout << "IntrusiveAllocator::MemoryBlock::deallocate((" << ptr << ", " << size << ") OUTWITH block : " << this << std::endl; +#endif + + return false; +} + +void IntrusiveAllocator::MemoryBlock::report(std::ostream& out) const +{ + out << "MemoryBlock " << this << " " << name << std::endl; + out << " alignment = " << alignment << std::endl; + out << " blockAlignment = " << blockAlignment << std::endl; + out << " blockSize = " << blockSize << ", memory = " << static_cast(memory) << std::endl; + out << " maximumAllocationSize = " << maximumAllocationSize << std::endl; + out << " firstSlot = "<< firstSlot< allocated; + std::set available; + + while (position < capacity) + { + auto& slot = memory[position]; + if (slot.previous > capacity || slot.next > capacity) + { + std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " slot.corrupted invalid position = " << position << ", slot = {" << slot.previous << ", " << slot.next << ", " << int(slot.status) << "}" << std::endl; + return false; + } + 
+ if (slot.status == 0) + allocated.insert(position); + else + available.insert(position); + + if (slot.previous != 0) + { + if (slot.previous > position) + { + std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " slot.previous invalid position = " << position << ", slot = {" << slot.previous << ", " << slot.next << ", " << int(slot.status) << "}" << std::endl; + return false; + } + size_t previous_position = position - slot.previous; + if (previous_position != previous) + { + std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " validation failed : previous slot = " << previous << " doesn't match slot.previous, position = " << position << ", slot = {" << slot.previous << ", " << slot.next << ", " << int(slot.status) << "}" << std::endl; + return false; + } + + size_t previousFree = memory[position + 1].index; + size_t nextFree = memory[position + 2].index; + if (previousFree == position || nextFree == position) + { + std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " validation failed : slot's previous/nextFree points back to itself, position = " << position << ", slot = {" << slot.previous << ", " << slot.next << ", " << int(slot.status) << "} previousFree = " << previousFree << ", nextFree = " << nextFree << std::endl; + return false; + } + } + + if (slot.next == 0) + { + std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " validation failed: position = " << position << " slot = {" << slot.previous << ", " << slot.next << ", " << static_cast(slot.status) << "}" << std::endl; + return false; + } + + previous = position; + position += slot.next; + } + + std::set inFreeList; + + // std::cout<<"No invalid entries found"<(slot.status) << "}" << std::endl; + return false; + } + + if (memory[freePosition + 1].index != previousPosition || memory[freePosition + 1].index == freePosition) + { + std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " validation failed, free list inconsisntent, head = " << freeList.head << ", previousPosition = " << previousPosition << ", freePosition = " << freePosition << ", slot = {" << slot.previous << ", " << slot.next << ", " << static_cast(slot.status) << "} previousFree = " << memory[freePosition + 1].index << ", nextFree = " << memory[freePosition + 2].index << std::endl; + return false; + } + + previousPosition = freePosition; + freePosition = memory[freePosition + 2].index; + } + } + + if (available.size() != inFreeList.size()) + { + std::cerr << "IntrusiveAllocator::MemoryBlock::validate() " << this << " validation failed, Different number of entries in available and in freeList: available.size() = " << available.size() << ", inFreeList.size() = " << inFreeList.size() << std::endl; + return false; + } + + return true; +} + +size_t IntrusiveAllocator::MemoryBlock::totalAvailableSize() const +{ + size_t count = 0; + size_t position = firstSlot; + while (position < capacity) + { + auto& slot = memory[position]; + position += slot.next; + if (slot.status != 0) count += slot.next - 1; + } + + return count * sizeof(Element); +} + +size_t IntrusiveAllocator::MemoryBlock::totalReservedSize() const +{ + size_t count = 0; + size_t position = firstSlot; + while (position < capacity) + { + auto& slot = memory[position]; + position += slot.next; + if (slot.status == 0) count += slot.next - 1; + } + + return count * sizeof(Element); +} + +size_t IntrusiveAllocator::MemoryBlock::totalMemorySize() const +{ + size_t count = 0; + size_t position = firstSlot; + while (position 
< capacity) + { + auto& slot = memory[position]; + position += slot.next; + count += slot.next - 1; + } + + return count * sizeof(Element); +} + +////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// +// MemoryBlocks +// +IntrusiveAllocator::MemoryBlocks::MemoryBlocks(IntrusiveAllocator* in_parent, const std::string& in_name, size_t in_blockSize, size_t in_alignment) : + parent(in_parent), + name(in_name), + alignment(in_alignment), + blockSize(in_blockSize), + maximumAllocationSize(IntrusiveAllocator::MemoryBlock::computeMaxiumAllocationSize(in_blockSize, in_alignment)) +{ +} + +IntrusiveAllocator::MemoryBlocks::~MemoryBlocks() +{ +} + +void* IntrusiveAllocator::MemoryBlocks::allocate(std::size_t size) +{ + if (memoryBlockWithSpace) + { + auto ptr = memoryBlockWithSpace->allocate(size); + if (ptr) return ptr; + } + + size_t new_blockSize = std::max(size, blockSize); + for (auto& block : memoryBlocks) + { + if (block != memoryBlockWithSpace) + { + auto ptr = block->allocate(size); + if (ptr) return ptr; + } + } + + auto new_block = std::make_shared(name, new_blockSize, alignment); + if (parent) + { + parent->memoryBlocks[new_block->memory] = new_block; + } + + if (memoryBlocks.empty()) + { + maximumAllocationSize = new_block->maximumAllocationSize; + } + + memoryBlockWithSpace = new_block; + memoryBlocks.push_back(new_block); + + auto ptr = new_block->allocate(size); + + return ptr; +} + +void IntrusiveAllocator::MemoryBlocks::report(std::ostream& out) const +{ + out << "IntrusiveAllocator::MemoryBlocks::report() memoryBlocks.size() = " << memoryBlocks.size() << std::endl; + for (auto& memoryBlock : memoryBlocks) + { + memoryBlock->report(out); + } +} + +bool IntrusiveAllocator::MemoryBlocks::validate() const +{ + bool valid = true; + for (auto& memoryBlock : memoryBlocks) + { + valid = memoryBlock->validate() && valid; + } + return valid; +} + +size_t IntrusiveAllocator::MemoryBlocks::deleteEmptyMemoryBlocks() +{ + size_t count = 0; + decltype(memoryBlocks) remainingBlocks; + for(auto& memoryBlock : memoryBlocks) + { + if (memoryBlock->totalReservedSize() == 0) + { + count += memoryBlock->totalAvailableSize(); + } + else + { + remainingBlocks.push_back(memoryBlock); + } + } + memoryBlocks.swap(remainingBlocks); + + return count; +} + +size_t IntrusiveAllocator::MemoryBlocks::totalAvailableSize() const +{ + size_t count = 0; + for(auto& memoryBlock : memoryBlocks) + { + count += memoryBlock->totalAvailableSize(); + } + return count; +} + +size_t IntrusiveAllocator::MemoryBlocks::totalReservedSize() const +{ + size_t count = 0; + for(auto& memoryBlock : memoryBlocks) + { + count += memoryBlock->totalReservedSize(); + } + return count; +} + +size_t IntrusiveAllocator::MemoryBlocks::totalMemorySize() const +{ + size_t count = 0; + for(auto& memoryBlock : memoryBlocks) + { + count += memoryBlock->totalMemorySize(); + } + return count; +} + +////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// +// IntrusiveAllocator +// +IntrusiveAllocator::IntrusiveAllocator(size_t in_defaultAlignment) : + Allocator(in_defaultAlignment) +{ + size_t Megabyte = size_t(1024) * size_t(1024); + size_t blockSize = size_t(1) * Megabyte; + + allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, defaultAlignment)); + 
allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, defaultAlignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, defaultAlignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", blockSize, 16)); +} + +IntrusiveAllocator::IntrusiveAllocator(std::unique_ptr in_nestedAllocator, size_t in_defaultAlignment) : + Allocator(std::move(in_nestedAllocator), in_defaultAlignment) +{ + size_t Megabyte = size_t(1024) * size_t(1024); + size_t blockSize = size_t(1) * Megabyte; + + allocatorMemoryBlocks.resize(vsg::ALLOCATOR_AFFINITY_LAST); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_OBJECTS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_OBJECTS", blockSize, defaultAlignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_DATA].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_DATA", size_t(16) * blockSize, defaultAlignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_NODES].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_NODES", blockSize, defaultAlignment)); + allocatorMemoryBlocks[vsg::ALLOCATOR_AFFINITY_PHYSICS].reset(new MemoryBlocks(this, "ALLOCATOR_AFFINITY_PHYSICS", blockSize, 16)); +} + +IntrusiveAllocator::~IntrusiveAllocator() +{ +} + +void IntrusiveAllocator::setBlockSize(AllocatorAffinity allocatorAffinity, size_t blockSize) +{ + std::scoped_lock lock(mutex); + + if (size_t(allocatorAffinity) < allocatorMemoryBlocks.size()) + { + allocatorMemoryBlocks[allocatorAffinity]->blockSize = blockSize; + } + else + { + auto name = vsg::make_string("MemoryBlocks_", allocatorAffinity); + + allocatorMemoryBlocks.resize(allocatorAffinity + 1); + allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, name, blockSize, defaultAlignment)); + } +} + +void IntrusiveAllocator::report(std::ostream& out) const +{ + out << "IntrusiveAllocator::report() " << allocatorMemoryBlocks.size() << std::endl; + + for (const auto& memoryBlock : allocatorMemoryBlocks) + { + if (memoryBlock) memoryBlock->report(out); + } + + validate(); +} + +void* IntrusiveAllocator::allocate(std::size_t size, AllocatorAffinity allocatorAffinity) +{ + std::scoped_lock lock(mutex); + + // create a MemoryBlocks entry if one doesn't already exist + if (allocatorAffinity > allocatorMemoryBlocks.size()) + { + size_t blockSize = 1024 * 1024; // Megabyte + allocatorMemoryBlocks.resize(allocatorAffinity + 1); + allocatorMemoryBlocks[allocatorAffinity].reset(new MemoryBlocks(this, "MemoryBlockAffinity", blockSize, defaultAlignment)); + } + + void* ptr = nullptr; + + auto& blocks = allocatorMemoryBlocks[allocatorAffinity]; + if (blocks) + { + if (size <= blocks->maximumAllocationSize) + { + ptr = blocks->allocate(size); + if (ptr) return ptr; + //std::cout<<"IntrusiveAllocator::allocate() Failed to allocator memory from memoryBlocks "<alignment}); + if (ptr) largeAllocations[ptr] = std::pair(blocks->alignment, size); + //std::cout<<"IntrusiveAllocator::allocate() MemoryBlocks aligned large allocation = "<maximumAllocationSize = "<maximumAllocationSize<(defaultAlignment, size); + //std::cout<<"IntrusiveAllocator::allocate() default aligned large allocation = "<second.first}); + largeAllocations.erase(la_itr); + return true; + } + + if (nestedAllocator && nestedAllocator->deallocate(ptr, size)) + { + return true; + } + + return false; +} + +bool IntrusiveAllocator::validate() const +{ + bool valid 
= true; + for (auto& memoryBlock : allocatorMemoryBlocks) + { + valid = memoryBlock->validate() && valid; + } + return valid; +} + +size_t IntrusiveAllocator::deleteEmptyMemoryBlocks() +{ + size_t count = 0; + for(auto& blocks : allocatorMemoryBlocks) + { + count += blocks->deleteEmptyMemoryBlocks(); + } + return count; +} + +size_t IntrusiveAllocator::totalAvailableSize() const +{ + size_t count = 0; + for(auto& blocks : allocatorMemoryBlocks) + { + count += blocks->totalAvailableSize(); + } + return count; +} + +size_t IntrusiveAllocator::totalReservedSize() const +{ + size_t count = 0; + for(auto& blocks : allocatorMemoryBlocks) + { + count += blocks->totalReservedSize(); + } + return count; +} + +size_t IntrusiveAllocator::totalMemorySize() const +{ + size_t count = 0; + for(auto& blocks : allocatorMemoryBlocks) + { + count += blocks->totalMemorySize(); + } + return count; +} diff --git a/src/vsg/core/MemorySlots.cpp b/src/vsg/core/MemorySlots.cpp index 05258ea3e..18e4c8b63 100644 --- a/src/vsg/core/MemorySlots.cpp +++ b/src/vsg/core/MemorySlots.cpp @@ -10,7 +10,7 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI */ -#include +#include #include #include #include diff --git a/src/vsg/io/Path.cpp b/src/vsg/io/Path.cpp index 958589a64..96ba4efeb 100644 --- a/src/vsg/io/Path.cpp +++ b/src/vsg/io/Path.cpp @@ -13,6 +13,7 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI #include #include +#include #include using namespace vsg; From a37b6910f6e2b654857adc6d41e79760264c75ef Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Mon, 1 Jul 2024 18:43:19 +0100 Subject: [PATCH 38/43] Ran clang-format --- include/vsg/core/Allocator.h | 3 +-- src/vsg/core/Allocator.cpp | 2 +- src/vsg/core/IntrusiveAllocator.cpp | 21 ++++++++++----------- src/vsg/core/MemorySlots.cpp | 2 +- src/vsg/io/Path.cpp | 2 +- src/vsg/vk/DeviceMemory.cpp | 3 +-- 6 files changed, 15 insertions(+), 18 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index d0ad0d997..1847e0ed9 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -14,8 +14,8 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI #include -#include #include +#include namespace vsg { @@ -130,5 +130,4 @@ namespace vsg template using allocator_affinity_physics = allocator_affinity_adapter; - } // namespace vsg diff --git a/src/vsg/core/Allocator.cpp b/src/vsg/core/Allocator.cpp index 7c8ea9805..6999c5596 100644 --- a/src/vsg/core/Allocator.cpp +++ b/src/vsg/core/Allocator.cpp @@ -10,8 +10,8 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI */ -#include #include +#include #include #include #include diff --git a/src/vsg/core/IntrusiveAllocator.cpp b/src/vsg/core/IntrusiveAllocator.cpp index 7529d4066..04001f69d 100644 --- a/src/vsg/core/IntrusiveAllocator.cpp +++ b/src/vsg/core/IntrusiveAllocator.cpp @@ -10,8 +10,8 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI */ -#include #include +#include #include #include #include @@ -56,7 +56,6 @@ IntrusiveAllocator::MemoryBlock::MemoryBlock(const std::string& in_name, size_t // // vsg::debug(" capacity = ", capacity, ", max_slot_size = ", max_slot_size); - // set up the free tracking to encompass the whole buffer // start at element before the first aligned element so that position 0 can be used to mark beginning or end of free lists freeLists.emplace_back(); @@ -540,7 +539,7 @@ void 
IntrusiveAllocator::MemoryBlock::report(std::ostream& out) const out << " blockAlignment = " << blockAlignment << std::endl; out << " blockSize = " << blockSize << ", memory = " << static_cast(memory) << std::endl; out << " maximumAllocationSize = " << maximumAllocationSize << std::endl; - out << " firstSlot = "<< firstSlot< */ -#include #include +#include #include #include diff --git a/src/vsg/io/Path.cpp b/src/vsg/io/Path.cpp index 96ba4efeb..27b5cbdbd 100644 --- a/src/vsg/io/Path.cpp +++ b/src/vsg/io/Path.cpp @@ -13,8 +13,8 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI #include #include -#include #include +#include using namespace vsg; diff --git a/src/vsg/vk/DeviceMemory.cpp b/src/vsg/vk/DeviceMemory.cpp index f62d4ed51..6ba6ce2d1 100644 --- a/src/vsg/vk/DeviceMemory.cpp +++ b/src/vsg/vk/DeviceMemory.cpp @@ -28,7 +28,7 @@ DeviceMemoryList vsg::getActiveDeviceMemoryList(VkMemoryPropertyFlagBits propert { std::scoped_lock lock(s_DeviceMemoryListMutex); DeviceMemoryList dml; - for(auto& dm : s_DeviceMemoryList) + for (auto& dm : s_DeviceMemoryList) { auto dm_ref_ptr = dm.ref_ptr(); if ((dm_ref_ptr->getMemoryPropertyFlags() & propertyFlags) != 0) @@ -191,4 +191,3 @@ size_t DeviceMemory::totalMemorySize() const { return _memorySlots.totalMemorySize(); } - From c7affed82904361f717f3ec9bcbf7642ae81431c Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Tue, 2 Jul 2024 09:57:45 +0100 Subject: [PATCH 39/43] Fixed cppcheck issues --- include/vsg/core/IntrusiveAllocator.h | 4 ++-- src/vsg/core/IntrusiveAllocator.cpp | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/include/vsg/core/IntrusiveAllocator.h b/include/vsg/core/IntrusiveAllocator.h index e02c0b403..3d56d9388 100644 --- a/include/vsg/core/IntrusiveAllocator.h +++ b/include/vsg/core/IntrusiveAllocator.h @@ -95,7 +95,7 @@ namespace vsg using Status = decltype(status); using Index = decltype(index); - Element(size_t in_index) : + explicit Element(size_t in_index) : index(static_cast(in_index)) {} Element(size_t in_previous, size_t in_next, unsigned int in_status) : @@ -130,7 +130,7 @@ namespace vsg bool freeSlotsAvaible(size_t size) const; - inline bool within(void* ptr) const { return memory <= ptr && ptr < memoryEnd; } + inline bool within(const void* ptr) const { return memory <= ptr && ptr < memoryEnd; } size_t totalAvailableSize() const; size_t totalReservedSize() const; diff --git a/src/vsg/core/IntrusiveAllocator.cpp b/src/vsg/core/IntrusiveAllocator.cpp index 04001f69d..17a80b891 100644 --- a/src/vsg/core/IntrusiveAllocator.cpp +++ b/src/vsg/core/IntrusiveAllocator.cpp @@ -380,12 +380,12 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/ if (freeList.head == N) { freeList.head = P; - memory[P + 1] = 0; + memory[P + 1].index = 0; } else { memory[P + 1].index = NPF; - if (NPF != 0) memory[NPF + 2] = P; + if (NPF != 0) memory[NPF + 2].index = P; } } else // P and N aren't directly connected within the freeList @@ -474,7 +474,7 @@ bool IntrusiveAllocator::MemoryBlock::deallocate(void* ptr, std::size_t /*size*/ if (freeList.head != 0) { - memory[freeList.head + 1] = C; // set previous heads previousFree to C. + memory[freeList.head + 1].index = C; // set previous heads previousFree to C. } // set the head to C. 
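
With the IntrusiveAllocator now split into its own header and source file, it can be installed in place of the default allocator through the existing Allocator::instance() singleton. The following is a minimal, illustrative usage sketch only — it is not part of any patch in this series, and the 4 MB node block size and the explicit alignment argument of 4 are arbitrary example values chosen for the sketch; it assumes the VulkanSceneGraph headers and library built from this series.

    #include <vsg/core/IntrusiveAllocator.h>

    #include <iostream>

    int main()
    {
        // typically done at startup, before any vsg objects are allocated,
        // so that all subsequent allocations go through the new allocator
        vsg::Allocator::instance().reset(new vsg::IntrusiveAllocator(4));

        // per-affinity block sizes can be overridden via the virtual setBlockSize()
        vsg::Allocator::instance()->setBlockSize(vsg::ALLOCATOR_AFFINITY_NODES, size_t(4) * 1024 * 1024);

        // ... create and destroy scene graph objects here ...

        // release memory blocks that have become empty and print usage stats
        vsg::Allocator::instance()->deleteEmptyMemoryBlocks();
        vsg::Allocator::instance()->report(std::cout);
        return 0;
    }
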
From 3a4cf6b1d70d905465d5baa4b142243361afccb8 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Tue, 2 Jul 2024 10:02:46 +0100 Subject: [PATCH 40/43] Quietened down silly cppcheck warnings on valid code that is better than the suggested alternative. --- cmake/cppcheck-suppression-list.txt | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/cmake/cppcheck-suppression-list.txt b/cmake/cppcheck-suppression-list.txt index 154ebc34d..ccf3afc76 100644 --- a/cmake/cppcheck-suppression-list.txt +++ b/cmake/cppcheck-suppression-list.txt @@ -48,6 +48,7 @@ useStlAlgorithm:*/src/vsg/vk/Instance.cpp useStlAlgorithm:*/src/vsg/vk/RenderPass.cpp useStlAlgorithm:*/src/vsg/core/Allocator.cpp useStlAlgorithm:*/src/vsg/core/MemorySlots.cpp +useStlAlgorithm:*/src/vsg/core/IntrusiveAllocator.cpp useStlAlgorithm:*/src/vsg/state/ArrayState.cpp useStlAlgorithm:*/src/vsg/app/CompileTraversal.cpp useStlAlgorithm:*/src/vsg/utils/ShaderSet.cpp @@ -74,6 +75,7 @@ syntaxError:*include/vsg/core/Data.h:51 unusedStructMember:*include/vsg/core/Data.h unusedStructMember:*include/vsg/core/Exception.h unusedStructMember:*include/vsg/core/Version.h +unusedStructMember:*include/vsg/core/IntrusiveAllocator.h unusedStructMember:*include/vsg/io/ObjectCache.h unusedStructMember:*include/vsg/nodes/Bin.h unusedStructMember:*include/vsg/nodes/LOD.h @@ -145,6 +147,8 @@ returnTempReference:*/include/vsg/core/Inherit.h variableScope:*/include/vsg/utils/SharedObjects.h variableScope:*/src/vsg/utils/SharedObjects.cpp variableScope:*/src/vsg/app/CompileManager.cpp +variableScope:*/src/vsg/core/IntrusiveAllocator.cpp // suppress really stupid warning of pointerLessThanZero pointerLessThanZero:*/src/vsg/app/Viewer.cpp + From 1a9dd577673469585628bef8f1472395a164a394 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Tue, 2 Jul 2024 12:33:23 +0100 Subject: [PATCH 41/43] Ran build_all --- include/vsg/all.h | 1 + 1 file changed, 1 insertion(+) diff --git a/include/vsg/all.h b/include/vsg/all.h index cae892c94..d88ba9be3 100644 --- a/include/vsg/all.h +++ b/include/vsg/all.h @@ -24,6 +24,7 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI #include #include #include +#include #include #include #include From 447cdf54976a4207a4fcf74f569e0a9db62552a6 Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Tue, 2 Jul 2024 12:58:54 +0100 Subject: [PATCH 42/43] Quietened down debug messages --- include/vsg/core/Allocator.h | 3 ++- src/vsg/vk/Context.cpp | 8 ++++---- src/vsg/vk/DeviceMemory.cpp | 4 ++-- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/include/vsg/core/Allocator.h b/include/vsg/core/Allocator.h index 1847e0ed9..568506aa3 100644 --- a/include/vsg/core/Allocator.h +++ b/include/vsg/core/Allocator.h @@ -12,8 +12,9 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI */ -#include +#include +#include #include #include diff --git a/src/vsg/vk/Context.cpp b/src/vsg/vk/Context.cpp index 3413fea2b..ba4813280 100644 --- a/src/vsg/vk/Context.cpp +++ b/src/vsg/vk/Context.cpp @@ -103,21 +103,21 @@ Context::Context(Device* in_device, const ResourceRequirements& in_resourceRequi if (!deviceMemoryBufferPools) { device->deviceMemoryBufferPools = deviceMemoryBufferPools = MemoryBufferPools::create("Device_MemoryBufferPool", device, in_resourceRequirements); - vsg::info("Context::Context() creating new deviceMemoryBufferPools = ", deviceMemoryBufferPools); + vsg::debug("Context::Context() creating new deviceMemoryBufferPools = ", deviceMemoryBufferPools); } else { - 
vsg::info("Context::Context() reusing deviceMemoryBufferPools = ", deviceMemoryBufferPools); + vsg::debug("Context::Context() reusing deviceMemoryBufferPools = ", deviceMemoryBufferPools); } if (!stagingMemoryBufferPools) { device->stagingMemoryBufferPools = stagingMemoryBufferPools = MemoryBufferPools::create("Staging_MemoryBufferPool", device, in_resourceRequirements); - vsg::info("Context::Context() creating new stagingMemoryBufferPools = ", stagingMemoryBufferPools); + vsg::debug("Context::Context() creating new stagingMemoryBufferPools = ", stagingMemoryBufferPools); } else { - vsg::info("Context::Context() reusing stagingMemoryBufferPools = ", stagingMemoryBufferPools); + vsg::debug("Context::Context() reusing stagingMemoryBufferPools = ", stagingMemoryBufferPools); } } diff --git a/src/vsg/vk/DeviceMemory.cpp b/src/vsg/vk/DeviceMemory.cpp index 6ba6ce2d1..fc3cda084 100644 --- a/src/vsg/vk/DeviceMemory.cpp +++ b/src/vsg/vk/DeviceMemory.cpp @@ -94,7 +94,7 @@ DeviceMemory::DeviceMemory(Device* device, const VkMemoryRequirements& memRequir { std::scoped_lock lock(s_DeviceMemoryListMutex); s_DeviceMemoryList.emplace_back(this); - vsg::info("DeviceMemory::DeviceMemory() added to s_DeviceMemoryList, s_DeviceMemoryList.size() = ", s_DeviceMemoryList.size()); + vsg::debug("DeviceMemory::DeviceMemory() added to s_DeviceMemoryList, s_DeviceMemoryList.size() = ", s_DeviceMemoryList.size()); } } @@ -115,7 +115,7 @@ DeviceMemory::~DeviceMemory() if (itr != s_DeviceMemoryList.end()) { s_DeviceMemoryList.erase(itr); - vsg::info("DeviceMemory::~DeviceMemory() removed from s_DeviceMemoryList, s_DeviceMemoryList.size() = ", s_DeviceMemoryList.size()); + vsg::debug("DeviceMemory::~DeviceMemory() removed from s_DeviceMemoryList, s_DeviceMemoryList.size() = ", s_DeviceMemoryList.size()); } else { From a34563eeb78c5289352c5bdcd4acb9a3d41677dd Mon Sep 17 00:00:00 2001 From: Robert Osfield Date: Tue, 2 Jul 2024 13:09:24 +0100 Subject: [PATCH 43/43] Tightened up types --- include/vsg/core/IntrusiveAllocator.h | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/include/vsg/core/IntrusiveAllocator.h b/include/vsg/core/IntrusiveAllocator.h index 3d56d9388..11ffc5dd1 100644 --- a/include/vsg/core/IntrusiveAllocator.h +++ b/include/vsg/core/IntrusiveAllocator.h @@ -95,10 +95,10 @@ namespace vsg using Status = decltype(status); using Index = decltype(index); - explicit Element(size_t in_index) : + explicit Element(Index in_index) : index(static_cast(in_index)) {} - Element(size_t in_previous, size_t in_next, unsigned int in_status) : + Element(Offset in_previous, Offset in_next, Status in_status) : previous(static_cast(in_previous)), next(static_cast(in_next)), status(in_status) {} @@ -136,6 +136,7 @@ namespace vsg size_t totalReservedSize() const; size_t totalMemorySize() const; + // used for debugging only. struct VSG_DECLSPEC SlotTester { SlotTester(Element* in_mem, size_t in_head) :