diff --git a/CHANGELOG.MD b/CHANGELOG.MD new file mode 100644 index 00000000..43c324a0 --- /dev/null +++ b/CHANGELOG.MD @@ -0,0 +1,16 @@ +0.2 +--- +* added temporary_allocator as portable alloca +* added small_node_pool type optimized for low-overhead small object allocations +* added various allocator adapters including a thread_safe_allocator for locking +* better compiler support +* many internal changes and bugfixes + +0.1-1 +----- +* critical bugfix in memory_stack +* added smart pointer example + +0.1 +--- +* first beta version diff --git a/CMakeLists.txt b/CMakeLists.txt index 815fe385..3969f457 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -6,11 +6,49 @@ else() project(foonathan_memory) endif() +include(CheckCXXSourceCompiles) +include(CheckCXXCompilerFlag) +CHECK_CXX_COMPILER_FLAG(-std=c++11 cpp11_flag) +if (cpp11_flag) + set(CMAKE_REQUIRED_FLAGS "-std=c++11") +else() + CHECK_CXX_COMPILER_FLAG(-std=c++0x cpp0x_flag) + if (cpp0x_flag) + set(CMAKE_REQUIRED_FLAGS "-std=c++0x") + endif(cpp0x_flag) +endif(cpp11_flag) + +CHECK_CXX_SOURCE_COMPILES("int main() {int i = alignof(int);}" comp_alignof) +CHECK_CXX_SOURCE_COMPILES("#include + using namespace std; + int main() {max_align_t val;}" comp_max_align) +CHECK_CXX_SOURCE_COMPILES("#include + int main() {auto handler = std::get_new_handler();}" comp_new_handler) +CHECK_CXX_SOURCE_COMPILES("thread_local int i; int main() {}" comp_thread_local) +CHECK_CXX_SOURCE_COMPILES("constexpr auto foo = 1; int main(){}" comp_constexpr) +CHECK_CXX_SOURCE_COMPILES("void foo() noexcept {} int main(){}" comp_noexcept) + +option(FOONATHAN_IMPL_HAS_ALIGNOF "whether or not alignof is available" ${comp_alignof}) +option(FOONATHAN_IMPL_HAS_MAX_ALIGN "whether or not std::max_align_t is available" ${comp_max_align}) +option(FOONATHAN_IMPL_HAS_GET_NEW_HANDLER "whether or not std::get_new_handler() is available" ${comp_new_handler}) +option(FOONATHAN_IMPL_HAS_THREAD_LOCAL "whether or not thread_local is available" 
${comp_thread_local}) +option(FOONATHAN_IMPL_HAS_CONSTEXPR "whether or not constexpr is available" ${comp_constexpr}) +option(FOONATHAN_IMPL_HAS_NOEXCEPT "whether or not noexcept is available" ${comp_noexcept}) + +set(version_major 0 CACHE INTERNAL "") +set(version_minor 2 CACHE INTERNAL "") + +set(FOONATHAN_MEMORY_DEFAULT_ALLOCATOR heap_allocator CACHE STRING + "the default implementation allocator for higher-level ones") +option(FOONATHAN_MEMORY_THREAD_SAFE_ADAPTER "whether or not raw_allocator_adapter is thread safe by default" ON) + +configure_file("${CMAKE_CURRENT_SOURCE_DIR}/config.hpp.in" + "${CMAKE_CURRENT_BINARY_DIR}/config_impl.hpp") + if (BIICODE) ADD_BIICODE_TARGETS() - set(targets ${BII_BLOCK_TARGETS} CACHE INTERNAL "") - ACTIVATE_CPP11() + target_include_directories(${BII_BLOCK_TARGET} INTERFACE ${CMAKE_CURRENT_BINARY_DIR}) else() set(src detail/align.hpp @@ -19,8 +57,13 @@ else() detail/free_list.cpp detail/free_list.hpp detail/memory_stack.hpp + detail/small_free_list.cpp + detail/small_free_list.hpp + aligned_allocator.hpp allocator_adapter.hpp allocator_traits.hpp + config.hpp + default_allocator.hpp heap_allocator.cpp heap_allocator.hpp new_allocator.cpp @@ -29,19 +72,35 @@ else() pool_collection.cpp pool_collection.hpp pool_type.hpp + pool_type.cpp raw_allocator_base.hpp smart_ptr.hpp stack_allocator.hpp std_allocator_base.hpp + temporary_allocator.cpp + temporary_allocator.hpp + threading.hpp tracking.hpp CACHE INTERNAL "") add_library(foonathan_memory ${src}) - add_executable(foonathan_memory_example_allocator ${src} example/allocator.cpp) - add_executable(foonathan_memory_example_smart_ptr ${src} example/smart_ptr.cpp) + add_executable(foonathan_memory_example_allocator example/allocator.cpp) + add_executable(foonathan_memory_example_smart_ptr example/smart_ptr.cpp) + add_executable(foonathan_memory_example_temporary example/temporary.cpp) - set(targets foonathan_memory foonathan_memory_example_allocator foonathan_memory_example_smart_ptr 
CACHE INTERNAL "") + target_link_libraries(foonathan_memory_example_allocator PUBLIC foonathan_memory) + target_link_libraries(foonathan_memory_example_smart_ptr PUBLIC foonathan_memory) + target_link_libraries(foonathan_memory_example_temporary PUBLIC foonathan_memory) + + set(targets foonathan_memory + foonathan_memory_example_allocator + foonathan_memory_example_smart_ptr + foonathan_memory_example_temporary + CACHE INTERNAL "") set_target_properties(${targets} PROPERTIES CXX_STANDARD 11) - set_target_properties(${targets} PROPERTIES CXX_STANDARD_REQUIRED ON) + + foreach(target ${targets}) + target_include_directories(${target} PUBLIC ${CMAKE_CURRENT_BINARY_DIR}) + endforeach() endif() diff --git a/README.md b/README.md index 9da03549..ba8c8fcd 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,13 @@ memory ====== -This library provides various memory allocators for high-performance allocation and deallocation. These allocators are provided in the form of a new allocator concept: RawAllocator. A RawAllocator is an improved version over the classical STL-Allocator. There are various wrapper classes and traits to convert between the two types. Each RawAllocator has the following interface or an appropriate specialization of the raw_allocator_traits: +The C++ STL allocator model has various flaws. For example, they are fixed to a certain type, because they are almost necessarily required to be templates. So you can't easily share a single allocator for multiple types. In addition, you can only get a copy from the containers and not the original allocator object. At least with C++11 they are allowed to be stateful and so can be made object not instance based. But still, the model has many flaws. +Over the course of the years many solutions have been proposed. for example EASTL[1]. This library is another. But instead of trying to change the STL, it works with the current implementation. 
+ +RawAllocator +------------ +This library provides a new allocator concept, a *RawAllocator*. Where the classic *Allocator* works with types and similar to new/delete, a *RawAllocator* works with raw memory (hence the name) and is similar to ::operator new/::operator delete or malloc/free. This allows it to be decoupled from concrete types and stops the need for templates (still, almost all allocators in this library are templates to allow maximum flexibility, but they are not required). +Another difference is the separation between node and array allocations. A node is a single object, like an element of std::list. An array is a collection of such nodes. This is useful for memory pools, where there needs to be a different approach depending on whether it is an array or node allocation. +In addition, the *RawAllocator* supports alignment requirements. The memory can be aligned to certain boundaries. This allows fine tuned allocation and efficiency. The required interface for *RawAllocator* is as follows: // A raw allocator, only supports raw memory allocation. // Similar to ::operator new/malloc. This avoids the need to be templated and to use one allocator for multiple types. @@ -10,6 +17,7 @@ This library provides various memory allocators for high-performance allocation public: // Whether or not the allocator is stateful. // Non-stateful allocators don't need to be stored and can be default constructed on the fly. + // Thus, it is probably an empty type. using is_stateful = std::true_type/std::false_type; // The allocator is required to be moveable @@ -27,9 +35,11 @@ This library provides various memory allocators for high-performance allocation void* allocate_array(std::size_t count, std::size_t size, std::size_t alignment); // Deallocates memory for a node. Must not throw. + // Precondition: node must come from allocate_node with the same parameters and must not be null. 
void deallocate_node(void *node, std::size_t size, std::size_t alignment) noexcept; // Deallocates memory for an array of nodes. Must not throw. + // Precondition: array must come from allocate_array with the same parameters and must not be null. void deallocate_array(void *array, std::size_t count, std::size_t size, std::size_t alignment) noexcept; // Returns the maximum size of a node, inclusive. Should not throw. @@ -40,4 +50,34 @@ This library provides various memory allocators for high-performance allocation // Returns the maximum supported alignment, inclusive. Should not throw. std::size_t max_alignment() const; - }; + }; +Of course, there is a traits class for classes that do not support this interface directly - like STL-Allocators! +There are currently the following classes that model *RawAllocator* or have specialized the traits in this library: +* heap_allocator - Allocates memory using malloc/free +* new_allocator - Allocates memory using ::operator new/::operator delete +* memory_stack - Allocates huge blocks of memory, then can be used in a stack manner, deallocation only via unwinding +* memory_pool - Allocates huge blocks of memory and separates them into nodes of given size, great if you have multiple objects of the same size +* memory_pool_collection - Maintains multiple memory_pools at once to allow different sized allocation. + +The last three are special allocators. They allocate a big block of memory and give it out one by one. If the block is exhausted, a new one is allocated. The block allocation can be controlled via a template parameter, they use a given implementation *RawAllocator* for it (default is heap_allocator). + +Adapters +-------- +A new allocator model just by itself would be useless, because it can't be used with the existing model. For this case, there are adapters. The engine is the allocator_reference. This class stores a pointer to a *RawAllocator*. 
It allows copying of allocator classes as it is required by the STL containers. raw_allocator_allocator is a normal *Allocator* that stores one such allocator_reference. It forwards all allocation requests to it and thus to a user defined *RawAllocator*. Since the get_allocator() function returns a copy of the *Allocator* but allocator_reference stores a pointer, you can still access the original used allocator from a container. The new propagate_on_XXX members in raw_allocator_allocator have all been set to std::true_type. This ensures that an allocator always stays with its memory and allows fast moving. The raw_allocator_allocator thus allows that *RawAllocator* classes can be used with STL containers. + +The new smart pointer classes don't use *Allocator* classes, they use *Deleter*. But there are also adapters for those and new raw_allocate_unique/shared functions to easily create smart pointers whose memory is managed by *RawAllocator*. + +There are also tracking adapters. A *Tracker* provides functions that are called on certain events, such as memory allocation or allocator growth (when they allocate new blocks from the implementation allocator). tracked_allocator takes a *RawAllocator* and a *Tracker* and combines them. This allows easy monitoring of memory usage. Due to the power of templates, tracked_allocator works with all classes modelling the concept of *RawAllocator* including user defined ones. + +Other adapters include a thread safe wrapper, that locks a mutex prior to accessing, or another ensuring a certain minimum alignment. + +Compiler Support +---------------- +This library has been successfully compiled under the following compilers: +* GCC 4.7-4.9 on Linux +* clang 3.4-3.5 on Linux +* Visual Studio 12 on Windows + +There are compatibility options and replacement macros for alignof, thread_local, constexpr and noexcept and workarounds for missing std::max_align_t and std::get_new_handler(). 
+ +[1] EASTL - http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2271.html diff --git a/aligned_allocator.hpp b/aligned_allocator.hpp new file mode 100644 index 00000000..9d4db4fb --- /dev/null +++ b/aligned_allocator.hpp @@ -0,0 +1,124 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. + +#ifndef FOONATHAN_MEMORY_ALIGNED_ALLOCATOR_HPP_INCLUDED +#define FOONATHAN_MEMORY_ALIGNED_ALLOCATOR_HPP_INCLUDED + +/// \file +/// \brief An allocator ensuring a certain alignment + +#include +#include + +#include "allocator_traits.hpp" + +namespace foonathan { namespace memory +{ + /// \brief A \c RawAllocator adapter that ensures a minimum alignment. + /// \details It changes the alignment requirements passed to the allocation function if necessary + /// and forwards to the wrapped allocator. + /// \ingroup memory + template + class aligned_allocator : RawAllocator + { + using traits = allocator_traits; + public: + using raw_allocator = RawAllocator; + using is_stateful = std::true_type; + + /// \brief Creates it passing it the minimum alignment requirement. + /// \details It must be less than the maximum supported alignment. + explicit aligned_allocator(std::size_t min_alignment, raw_allocator &&alloc = {}) + : raw_allocator(std::move(alloc)), min_alignment_(min_alignment) + { + assert(min_alignment_ <= max_alignment()); + } + + /// @{ + /// \brief (De-)Allocation functions ensure the given minimum alignemnt. + /// \details If the alignment requirement is higher, it is unchanged. 
+ void* allocate_node(std::size_t size, std::size_t alignment) + { + alignment = std::max(min_alignment_, alignment); + return traits::allocate_node(get_allocator(), size, alignment); + } + + void* allocate_array(std::size_t count, std::size_t size, std::size_t alignment) + { + alignment = std::max(min_alignment_, alignment); + return traits::allocate_array(get_allocator(), count, size, alignment); + } + + void deallocate_node(void *ptr, std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT + { + alignment = std::max(min_alignment_, alignment); + traits::deallocate_node(get_allocator(), ptr, size, alignment); + } + + void deallocate_array(void *ptr, std::size_t count, + std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT + { + alignment = std::max(min_alignment_, alignment); + traits::deallocate_array(get_allocator(), ptr, count, size, alignment); + } + /// @} + + std::size_t max_node_size() const + { + return traits::max_node_size(get_allocator()); + } + + std::size_t max_array_size() const + { + return traits::max_array_size(get_allocator()); + } + + std::size_t max_alignment() const + { + return traits::max_alignment(get_allocator()); + } + + /// @{ + /// \brief Returns a reference to the actual allocator. + raw_allocator& get_allocator() FOONATHAN_NOEXCEPT + { + return *this; + } + + const raw_allocator& get_allocator() const FOONATHAN_NOEXCEPT + { + return *this; + } + /// @} + + /// @{ + /// \brief Get/set the minimum alignment. + std::size_t min_alignment() const FOONATHAN_NOEXCEPT + { + return min_alignment_; + } + + void set_min_alignment(std::size_t min_alignment) + { + assert(min_alignment <= max_alignment()); + min_alignment_ = min_alignment; + } + /// @} + + private: + std::size_t min_alignment_; + }; + + /// \brief Creates an \ref aligned_allocator. 
+ /// \relates aligned_allocator + template + auto make_aligned_allocator(std::size_t min_alignment, RawAllocator &&allocator) FOONATHAN_NOEXCEPT + -> aligned_allocator::type> + { + return aligned_allocator::type> + {min_alignment, std::forward(allocator)}; + } +}} // namespace foonathan::memory + +#endif // FOONATHAN_MEMORY_ALIGNED_ALLOCATOR_HPP_INCLUDED diff --git a/allocator_adapter.hpp b/allocator_adapter.hpp index ed2be3f0..f4fcd8d8 100644 --- a/allocator_adapter.hpp +++ b/allocator_adapter.hpp @@ -12,24 +12,104 @@ #include #include +#include "config.hpp" #include "allocator_traits.hpp" +#include "threading.hpp" #include "tracking.hpp" namespace foonathan { namespace memory { + /// \brief Wraps any class that has specialized the \ref allocator_traits and gives it the proper interface. + /// \details It just forwards all function to the traits and makes it easier to use them. + /// \ingroup memory + template + class allocator_adapter : RawAllocator + { + using traits = allocator_traits; + public: + using raw_allocator = RawAllocator; + using is_stateful = typename traits::is_stateful; + + allocator_adapter(raw_allocator &&alloc = {}) + : raw_allocator(std::move(alloc)) {} + + void* allocate_node(std::size_t size, std::size_t alignment) + { + return traits::allocate_node(get_allocator(), size, alignment); + } + + void* allocate_array(std::size_t count, std::size_t size, std::size_t alignment) + { + return traits::allocate_array(get_allocator(), count, size, alignment); + } + + void deallocate_node(void *ptr, std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT + { + traits::deallocate_node(get_allocator(), ptr, size, alignment); + } + + void deallocate_array(void *ptr, std::size_t count, + std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT + { + traits::deallocate_array(get_allocator(), ptr, count, size, alignment); + } + + std::size_t max_node_size() const + { + return traits::max_node_size(get_allocator()); + } + + std::size_t max_array_size() 
const + { + return traits::max_array_size(get_allocator()); + } + + std::size_t max_alignment() const + { + return traits::max_alignment(get_allocator()); + } + + /// @{ + /// \brief Returns a reference to the actual allocator. + raw_allocator& get_allocator() FOONATHAN_NOEXCEPT + { + return *this; + } + + const raw_allocator& get_allocator() const FOONATHAN_NOEXCEPT + { + return *this; + } + /// @} + }; + + /// \brief Creates an \ref allocator_adapter. + /// \relates allocator_adapter + template + auto make_allocator_adapter(RawAllocator &&allocator) FOONATHAN_NOEXCEPT + -> allocator_adapter::type> + { + return {std::forward(allocator)}; + } + namespace detail { + // stores a pointer to an allocator template - class allocator_storage + class allocator_reference_impl { public: - allocator_storage(RawAllocator &allocator) noexcept + allocator_reference_impl(RawAllocator &allocator) FOONATHAN_NOEXCEPT : alloc_(&allocator) {} + allocator_reference_impl(const allocator_reference_impl &) FOONATHAN_NOEXCEPT = default; + allocator_reference_impl& operator=(const allocator_reference_impl&) FOONATHAN_NOEXCEPT = default; protected: - ~allocator_storage() = default; + ~allocator_reference_impl() = default; + + using reference_type = RawAllocator&; - RawAllocator& get_allocator() const noexcept + reference_type get_allocator() const FOONATHAN_NOEXCEPT { return *alloc_; } @@ -38,108 +118,161 @@ namespace foonathan { namespace memory RawAllocator *alloc_; }; + // doesn't store anything for stateless allocators + // construct an instance on the fly template - class allocator_storage + class allocator_reference_impl { public: - allocator_storage() noexcept = default; - allocator_storage(const RawAllocator&) noexcept {} + allocator_reference_impl() FOONATHAN_NOEXCEPT = default; + allocator_reference_impl(const RawAllocator&) FOONATHAN_NOEXCEPT {} + allocator_reference_impl(const allocator_reference_impl &) FOONATHAN_NOEXCEPT = default; + allocator_reference_impl& operator=(const 
allocator_reference_impl&) FOONATHAN_NOEXCEPT = default; protected: - ~allocator_storage() = default; + ~allocator_reference_impl() = default; - RawAllocator get_allocator() const noexcept + using reference_type = RawAllocator; + + reference_type get_allocator() const FOONATHAN_NOEXCEPT { return {}; } }; } // namespace detail - /// \brief A \ref concept::RawAllocator storing a pointer to an allocator. - /// \detail It adapts any class by forwarding all requests to the stored allocator via the \ref allocator_traits.
- /// It is copy- and moveable. + /// \brief A \ref concept::RawAllocator storing a pointer to an allocator, thus making it copyable. + /// \details It adapts any class by forwarding all requests to the stored allocator via the \ref allocator_traits.
+ /// A mutex or \ref dummy_mutex can be specified that is locked prior to accessing the allocator.
+ /// For stateless allocators there is no locking or storing overhead whatsover, + /// they are just created as needed on the fly. /// \ingroup memory - template > - class raw_allocator_adapter - : detail::allocator_storage + template + class allocator_reference + : detail::allocator_reference_impl::is_stateful::value>, + detail::mutex_storage> { - using storage = detail::allocator_storage; + using traits = allocator_traits; + using storage = detail::allocator_reference_impl; + using actual_mutex = const detail::mutex_storage>; public: using raw_allocator = RawAllocator; - using is_stateful = typename Traits::is_stateful; - + using mutex = Mutex; + + using is_stateful = typename traits::is_stateful; + + /// @{ /// \brief Creates it giving it the \ref allocator_type. - /// \detail For non-stateful allocators, there exists a default-constructor and a version taking const-ref. - /// For stateful allocators it takes a non-const reference.
- /// Only stateful allocators are stored, non-stateful default-constructed on the fly. - using storage::storage; - + /// \details For non-stateful allocators, there exists a default-constructor and a version taking const-ref. + /// For stateful allocators it takes a non-const reference. + allocator_reference(const raw_allocator &alloc = {}) FOONATHAN_NOEXCEPT + : storage(alloc) {} + + allocator_reference(raw_allocator &alloc) FOONATHAN_NOEXCEPT + : storage(alloc) {} + /// @} + + /// @{ + /// \brief All concept functions lock the mutex and call the function on the referenced allocator. void* allocate_node(std::size_t size, std::size_t alignment) { + std::lock_guard lock(*this); auto&& alloc = get_allocator(); - return Traits::allocate_node(alloc, size, alignment); + return traits::allocate_node(alloc, size, alignment); } void* allocate_array(std::size_t count, std::size_t size, std::size_t alignment) { + std::lock_guard lock(*this); auto&& alloc = get_allocator(); - return Traits::allocate_array(alloc, count, size, alignment); + return traits::allocate_array(alloc, count, size, alignment); } - void deallocate_node(void *ptr, std::size_t size, std::size_t alignment) noexcept + void deallocate_node(void *ptr, std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT { + std::lock_guard lock(*this); auto&& alloc = get_allocator(); - Traits::deallocate_node(alloc, ptr, size, alignment); + traits::deallocate_node(alloc, ptr, size, alignment); } void deallocate_array(void *array, std::size_t count, - std::size_t size, std::size_t alignment) noexcept + std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT { + std::lock_guard lock(*this); auto&& alloc = get_allocator(); - Traits::deallocate_array(alloc, array, count, size, alignment); + traits::deallocate_array(alloc, array, count, size, alignment); } - /// \brief Returns the \ref allocator_type. - /// \detail It is a reference for stateful allocators and a temporary for non-stateful. 
- auto get_allocator() const noexcept - -> decltype(this->storage::get_allocator()) + std::size_t max_node_size() const { - return storage::get_allocator(); + std::lock_guard lock(*this); + auto&& alloc = get_allocator(); + return traits::max_node_size(alloc); } - std::size_t max_node_size() const noexcept + std::size_t max_array_size() const { + std::lock_guard lock(*this); auto&& alloc = get_allocator(); - return Traits::max_node_size(alloc); + return traits::max_array_size(alloc); } - std::size_t max_array_size() const noexcept + std::size_t max_alignment() const { + std::lock_guard lock(*this); auto&& alloc = get_allocator(); - return Traits::max_array_size(alloc); + return traits::max_alignment(alloc); } + /// @} - std::size_t max_alignment() const noexcept + /// @{ + /// \brief Returns a reference to the allocator while keeping it locked. + /// \details It returns a proxy object that holds the lock. + /// It has overloaded operator* and -> to give access to the allocator + /// but it can't be reassigned to a different allocator object. + detail::locked_allocator lock() FOONATHAN_NOEXCEPT { - auto&& alloc = get_allocator(); - return Traits::max_alignment(alloc); + return {*this, *this}; + } + + detail::locked_allocator lock() const FOONATHAN_NOEXCEPT + { + return {*this, *this}; + } + /// @} + + /// \brief Returns the \ref raw_allocator. + /// \details It is a reference for stateful allocators and a temporary for non-stateful. + /// \note This function does not perform any locking and is thus not thread safe. + auto get_allocator() const FOONATHAN_NOEXCEPT + -> typename storage::reference_type + { + return storage::get_allocator(); } }; - /// \brief Creates a \ref raw_allocator_adapter. - /// \relates raw_allocator_adapter + /// @{ + /// \brief Creates a \ref allocator_reference. 
+ /// \relates allocator_reference template - auto make_adapter(RawAllocator &&allocator) noexcept - -> raw_allocator_adapter::type> + auto make_allocator_reference(RawAllocator &&allocator) FOONATHAN_NOEXCEPT + -> allocator_reference::type> + { + return {std::forward(allocator)}; + } + + template + auto make_allocator_reference(RawAllocator &&allocator) FOONATHAN_NOEXCEPT + -> allocator_reference::type, Mutex> { return {std::forward(allocator)}; } + /// @} /// \brief Wraps a \ref concept::RawAllocator to create an \c std::allocator. /// - /// It uses a \ref raw_allocator_adapter to store the allocator to allow copy constructing.
+ /// It uses a \ref allocator_reference to store the allocator to allow copy constructing.
/// The underlying allocator is never moved, only the pointer to it.
/// \c propagate_on_container_swap is \c true to ensure that the allocator stays with its memory. /// \c propagate_on_container_move_assignment is \c true to allow fast move operations. @@ -147,7 +280,7 @@ namespace foonathan { namespace memory /// \ingroup memory template class raw_allocator_allocator - : raw_allocator_adapter + : allocator_reference { public: //=== typedefs ===// @@ -168,35 +301,37 @@ namespace foonathan { namespace memory using impl_allocator = RawAllocator; - //=== constructor ===// - raw_allocator_allocator() = default; + //=== constructor ===// + raw_allocator_allocator(const impl_allocator &alloc = {}) FOONATHAN_NOEXCEPT + : allocator_reference(alloc) {} - using raw_allocator_adapter::raw_allocator_adapter; + raw_allocator_allocator(impl_allocator &alloc) FOONATHAN_NOEXCEPT + : allocator_reference(alloc) {} - raw_allocator_allocator(const raw_allocator_adapter &alloc) noexcept - : raw_allocator_adapter(alloc) {} + raw_allocator_allocator(const allocator_reference &alloc) FOONATHAN_NOEXCEPT + : allocator_reference(alloc) {} template - raw_allocator_allocator(const raw_allocator_allocator &alloc) noexcept - : raw_allocator_adapter(alloc.get_impl_allocator()) {} + raw_allocator_allocator(const raw_allocator_allocator &alloc) FOONATHAN_NOEXCEPT + : allocator_reference(alloc.get_impl_allocator()) {} //=== allocation/deallocation ===// pointer allocate(size_type n, void * = nullptr) { void *mem = nullptr; if (n == 1) - mem = this->allocate_node(sizeof(value_type), alignof(value_type)); + mem = this->allocate_node(sizeof(value_type), FOONATHAN_ALIGNOF(value_type)); else - mem = this->allocate_array(n, sizeof(value_type), alignof(value_type)); + mem = this->allocate_array(n, sizeof(value_type), FOONATHAN_ALIGNOF(value_type)); return static_cast(mem); } - void deallocate(pointer p, size_type n) noexcept + void deallocate(pointer p, size_type n) FOONATHAN_NOEXCEPT { if (n == 1) - this->deallocate_node(p, sizeof(value_type), alignof(value_type)); + 
this->deallocate_node(p, sizeof(value_type), FOONATHAN_ALIGNOF(value_type)); else - this->deallocate_array(p, n, sizeof(value_type), alignof(value_type)); + this->deallocate_array(p, n, sizeof(value_type), FOONATHAN_ALIGNOF(value_type)); } //=== construction/destruction ===// @@ -208,45 +343,45 @@ namespace foonathan { namespace memory } template - void destroy(U *p) noexcept + void destroy(U *p) FOONATHAN_NOEXCEPT { p->~U(); } //=== getter ===// - size_type max_size() const noexcept + size_type max_size() const FOONATHAN_NOEXCEPT { return this->max_array_size() / sizeof(value_type); } - auto get_impl_allocator() const noexcept - -> decltype(this->get_allocator()) + auto get_impl_allocator() const FOONATHAN_NOEXCEPT + -> decltype(std::declval>().get_allocator()) { return this->get_allocator(); } private: template // stateful - bool equal_to(std::true_type, const raw_allocator_allocator &other) const noexcept + bool equal_to(std::true_type, const raw_allocator_allocator &other) const FOONATHAN_NOEXCEPT { return &get_impl_allocator() == &other.get_impl_allocator(); } template // non=stateful - bool equal_to(std::false_type, const raw_allocator_allocator &) const noexcept + bool equal_to(std::false_type, const raw_allocator_allocator &) const FOONATHAN_NOEXCEPT { return true; } template friend bool operator==(const raw_allocator_allocator &lhs, - const raw_allocator_allocator &rhs) noexcept; + const raw_allocator_allocator &rhs) FOONATHAN_NOEXCEPT; }; /// \brief Makes an \ref raw_allocator_allocator. 
/// \relates raw_allocator_allocator template - auto make_std_allocator(RawAllocator &&allocator) noexcept + auto make_std_allocator(RawAllocator &&allocator) FOONATHAN_NOEXCEPT -> raw_allocator_allocator::type> { return {std::forward(allocator)}; @@ -254,14 +389,14 @@ namespace foonathan { namespace memory template bool operator==(const raw_allocator_allocator &lhs, - const raw_allocator_allocator &rhs) noexcept + const raw_allocator_allocator &rhs) FOONATHAN_NOEXCEPT { return lhs.equal_to(typename allocator_traits::is_stateful{}, rhs); } template bool operator!=(const raw_allocator_allocator &lhs, - const raw_allocator_allocator &rhs) noexcept + const raw_allocator_allocator &rhs) FOONATHAN_NOEXCEPT { return !(lhs == rhs); } diff --git a/allocator_traits.hpp b/allocator_traits.hpp index 9e62299b..7889c759 100644 --- a/allocator_traits.hpp +++ b/allocator_traits.hpp @@ -12,10 +12,12 @@ #include #include +#include "detail/align.hpp" + namespace foonathan { namespace memory { /// \brief Default traits for \ref concept::RawAllocator classes. - /// \detail Specialize it for own classes. + /// \details Specialize it for own classes. 
/// \ingroup memory template class allocator_traits @@ -38,35 +40,35 @@ namespace foonathan { namespace memory } static void deallocate_node(allocator_type& state, - void *node, std::size_t size, std::size_t alignment) noexcept + void *node, std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT { state.deallocate_node(node, size, alignment); } static void deallocate_array(allocator_type& state, void *array, std::size_t count, - std::size_t size, std::size_t alignment) noexcept + std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT { state.deallocate_array(array, count, size, alignment); } - static std::size_t max_node_size(const allocator_type &state) noexcept + static std::size_t max_node_size(const allocator_type &state) { return state.max_node_size(); } - static std::size_t max_array_size(const allocator_type &state) noexcept + static std::size_t max_array_size(const allocator_type &state) { return state.max_array_size(); } - static std::size_t max_alignment(const allocator_type &state) noexcept + static std::size_t max_alignment(const allocator_type &state) { return state.max_alignment(); } }; /// \brief Provides all traits functions for \c std::allocator types. - /// \detail Inherit from it when specializing the \ref allocator_traits for such allocators.
+ /// \details Inherit from it when specializing the \ref allocator_traits for such allocators.
/// It uses the std::allocator_traits to call the functions. /// \ingroup memory template @@ -86,7 +88,7 @@ namespace foonathan { namespace memory public: /// @{ /// \brief Allocation functions forward to \c allocate(). - /// \detail They request a char-array of sufficient length.
+ /// \details They request a char-array of sufficient length.
/// Alignment is ignored. static void* allocate_node(allocator_type& state, std::size_t size, std::size_t) @@ -104,13 +106,13 @@ namespace foonathan { namespace memory /// @{ /// \brief Deallocation functions forward to \c deallocate(). static void deallocate_node(allocator_type& state, - void *node, std::size_t size, std::size_t) noexcept + void *node, std::size_t size, std::size_t) FOONATHAN_NOEXCEPT { std_traits::deallocate(state, static_cast(node), size); } static void deallocate_array(allocator_type& state, void *array, std::size_t count, - std::size_t size, std::size_t) noexcept + std::size_t size, std::size_t) FOONATHAN_NOEXCEPT { std_traits::deallocate(state, static_cast(array), count * size); } @@ -118,21 +120,21 @@ namespace foonathan { namespace memory /// @{ /// \brief The maximum size forwards to \c max_size(). - static std::size_t max_node_size(const allocator_type &state) noexcept + static std::size_t max_node_size(const allocator_type &state) FOONATHAN_NOEXCEPT { return std_traits::max_size(state); } - static std::size_t max_array_size(const allocator_type &state) noexcept + static std::size_t max_array_size(const allocator_type &state) FOONATHAN_NOEXCEPT { return std_traits::max_size(state); } /// @} /// \brief Maximum alignment is \c alignof(std::max_align_t). - static std::size_t max_alignment(const allocator_type &) noexcept + static std::size_t max_alignment(const allocator_type &) FOONATHAN_NOEXCEPT { - return alignof(std::max_align_t); + return detail::max_alignment; } }; diff --git a/biicode.conf b/biicode.conf index 86488e76..a4574ccb 100644 --- a/biicode.conf +++ b/biicode.conf @@ -4,39 +4,18 @@ biicode/cmake: 3 [parent] - # The parent version of this block. Must match folder name. E.g. - # user/block # No version number means not published yet - # You can change it to publish to a different track, and change version, e.g. 
- # user/block(track): 7 - foonathan/memory: 0 - + foonathan/memory: 0 + [paths] - # Local directories to look for headers (within block) - # / - # include [dependencies] - # Manual adjust file implicit dependencies, add (+), remove (-), or overwrite (=) - # hello.h + hello_imp.cpp hello_imp2.cpp - # *.h + *.cpp + CMakeLists.txt + config.hpp.in [mains] - # Manual adjust of files that define an executable - # !main.cpp # Do not build executable from this file - # main2.cpp # Build it (it doesnt have a main() function, but maybe it includes it) [hooks] - # These are defined equal to [dependencies],files names matching bii*stage*hook.py - # will be launched as python scripts at stage = {post_process, clean} - # CMakeLists.txt + bii/my_post_process1_hook.py bii_clean_hook.py [includes] - # Mapping of include patterns to external blocks - # hello*.h: user3/depblock # includes will be processed as user3/depblock/hello*.h [data] - # Manually define data files dependencies, that will be copied to bin for execution - # By default they are copied to bin/user/block/... which should be taken into account - # when loading from disk such data - # image.cpp + image.jpg # code should write open("user/block/image.jpg") diff --git a/config.hpp b/config.hpp new file mode 100644 index 00000000..7834458e --- /dev/null +++ b/config.hpp @@ -0,0 +1,12 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. 
+ +// dummy header including the real config file from the CMake binary dir + +#ifndef FOONATHAN_MEMORY_CONFIG_HPP_INCLUDED +#define FOONATHAN_MEMORY_CONFIG_HPP_INCLUDED + +#include "config_impl.hpp" + +#endif // FOONATHAN_MEMORY_CONFIG_HPP_INCLUDED diff --git a/config.hpp.in b/config.hpp.in new file mode 100644 index 00000000..9773ec74 --- /dev/null +++ b/config.hpp.in @@ -0,0 +1,68 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. + +#include + +//=== version ===// +/// \brief Major version. +#define FOONATHAN_MEMORY_VERSION_MAJOR ${version_major} + +/// \brief Minor version. +#define FOONATHAN_MEMORY_VERSION_MINOR ${version_minor} + +/// \brief Total version number. +#define FOONATHAN_MEMORY_VERSION (${version_major} * 100 + ${version_minor}) + +//=== options ===// +#define FOONATHAN_MEMORY_IMPL_DEFAULT_ALLOCATOR ${FOONATHAN_MEMORY_DEFAULT_ALLOCATOR} + +/// \brief Whether or not the \ref raw_allocator_adapter is threadsafe by default. +/// \detail This is \c true by default,change it via the CMake option \c FOONATHAN_MEMORY_THREAD_SAFE_ADAPTER. 
+#cmakedefine01 FOONATHAN_MEMORY_THREAD_SAFE_ADAPTER + +//=== compatibility ===// +#cmakedefine01 FOONATHAN_IMPL_HAS_ALIGNOF +#cmakedefine01 FOONATHAN_IMPL_HAS_MAX_ALIGN +#cmakedefine01 FOONATHAN_IMPL_HAS_GET_NEW_HANDLER +#cmakedefine01 FOONATHAN_IMPL_HAS_THREAD_LOCAL +#cmakedefine01 FOONATHAN_IMPL_HAS_NOEXCEPT +#cmakedefine01 FOONATHAN_IMPL_HAS_CONSTEXPR + +#ifndef FOONATHAN_ALIGNOF + #if FOONATHAN_IMPL_HAS_ALIGNOF + #define FOONATHAN_ALIGNOF(x) alignof(x) + #elif !FOONATHAN_IMPL_HAS_ALIGNOF && defined _MSC_VER + #define FOONATHAN_ALIGNOF(x) __alignof(x) + #else + #error "no alignof replacement available" + #endif +#endif + +#ifndef FOONATHAN_THREAD_LOCAL + #if FOONATHAN_IMPL_HAS_THREAD_LOCAL + #define FOONATHAN_THREAD_LOCAL thread_local + #elif !FOONATHAN_IMPL_HAS_THREAD_LOCAL && defined __GNUC__ + #define FOONATHAN_THREAD_LOCAL __thread + #elif !FOONATHAN_IMPL_HAS_THREAD_LOCAL && defined _MSC_VER + #define FOONATHAN_THREAD_LOCAL __declspec(thread) + #else + #error "no thread_local replacement available" + #endif +#endif + +#ifndef FOONATHAN_NOEXCEPT + #if FOONATHAN_IMPL_HAS_NOEXCEPT + #define FOONATHAN_NOEXCEPT noexcept + #else + #define FOONATHAN_NOEXCEPT + #endif +#endif + +#ifndef FOONATHAN_CONSTEXPR + #if FOONATHAN_IMPL_HAS_CONSTEXPR + #define FOONATHAN_CONSTEXPR constexpr + #else + #define FOONATHAN_CONSTEXPR const + #endif +#endif diff --git a/container.hpp b/container.hpp new file mode 100644 index 00000000..aaa7a971 --- /dev/null +++ b/container.hpp @@ -0,0 +1,110 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. + +#ifndef FOONATHAN_MEMORY_CONTAINER_HPP_INCLUDED +#define FOONATHAN_MEMORY_CONTAINER_HPP_INCLUDED + +/// \file +/// \brief Aliasas for STL containers to apply \c RawAllocator more easily. 
+ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "allocator_adapter.hpp" + +namespace foonathan { namespace memory +{ + /// @{ + /// \brief Aliases for STL containers using a \c RawAllocator. + /// \details It is just a shorthand to pass a \c RawAllocator to a container. + /// \ingroup memory +#define FOONATHAN_MEMORY_IMPL_CONTAINER1(Name) \ + template \ + using Name = std::Name>; + + FOONATHAN_MEMORY_IMPL_CONTAINER1(vector) + FOONATHAN_MEMORY_IMPL_CONTAINER1(deque) + FOONATHAN_MEMORY_IMPL_CONTAINER1(list) + FOONATHAN_MEMORY_IMPL_CONTAINER1(forward_list) + +#undef FOONATHAN_MEMORY_IMPL_CONTAINER1 + + template + using set = std::set, raw_allocator_allocator>; + + template + using multiset = std::multiset, raw_allocator_allocator>; + + template + using map = std::map, + raw_allocator_allocator, RawAllocator>>; + + template + using multimap = std::multimap, + raw_allocator_allocator, RawAllocator>>; + + template + using unordered_set = std::unordered_set, std::equal_to, + raw_allocator_allocator>; + + template + using unordered_multiset = std::unordered_multiset, std::equal_to, + raw_allocator_allocator>; + + template + using unordered_map = std::unordered_map, std::equal_to, + raw_allocator_allocator, RawAllocator>>; + + template + using unordered_multimap = std::unordered_multimap, std::equal_to, + raw_allocator_allocator, RawAllocator>>; + +#define FOONATHAN_MEMORY_IMPL_CONTAINER_ADAPTER(Name) \ + template \ + using Name = std::Name>; + FOONATHAN_MEMORY_IMPL_CONTAINER_ADAPTER(stack) + FOONATHAN_MEMORY_IMPL_CONTAINER_ADAPTER(queue) + FOONATHAN_MEMORY_IMPL_CONTAINER_ADAPTER(priority_queue) +#undef FOONATHAN_MEMORY_IMPL_CONTAINER_ADAPTER + /// @} + + /// @{ + /// \brief Convienience function to create a container adapter. + /// \details Creates this function and passes it the underlying container with certain allocator. 
+ /// \ingroup memory + template > + std::stack make_stack(RawAllocator &allocator) + { + return std::stack{Container(allocator)}; + } + + template > + std::queue make_queue(RawAllocator &allocator) + { + return std::queue{Container(allocator)}; + } + + template , + class Compare = std::less> + std::priority_queue + make_priority_queue(RawAllocator &allocator, Compare comp = {}) + { + return std::priority_queue + {std::move(comp), Container(allocator)}; + } + /// @} +}} // namespace foonathan::memory + +#endif // FOONATHAN_MEMORY_CONTAINER_HPP_INCLUDED diff --git a/default_allocator.hpp b/default_allocator.hpp new file mode 100644 index 00000000..76af257e --- /dev/null +++ b/default_allocator.hpp @@ -0,0 +1,24 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. + +#ifndef FOONATHAN_MEMORY_DEFAULT_ALLOCATOR_HPP_INCLUDED +#define FOONATHAN_MEMORY_DEFAULT_ALLOCATOR_HPP_INCLUDED + +#include "config.hpp" +#include "heap_allocator.hpp" +#include "new_allocator.hpp" + +namespace foonathan { namespace memory +{ + /// \brief The default allocator as implementation for the higher-level ones. + /// \details The higher-level allocator (\ref memory_stack, \ref memory_pool) use this allocator as default. + /// It must be one of the low-level, statelesss allocators.
+ /// You can change it via the CMake variable \c FOONATHAN_MEMORY_DEFAULT_ALLOCATOR, + /// but it must be one of the following: \ref heap_allocator, \ref new_allocator. + /// The default is \ref heap_allocator. + /// \ingroup memory + using default_allocator = FOONATHAN_MEMORY_IMPL_DEFAULT_ALLOCATOR; +}} // namespace foonathan::memory + +#endif // FOONATHAN_MEMORY_DEFAULT_ALLOCATOR_HPP_INCLUDED diff --git a/detail/align.hpp b/detail/align.hpp index 4667c128..b37f3b13 100644 --- a/detail/align.hpp +++ b/detail/align.hpp @@ -1,22 +1,43 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. + #ifndef FOONATHAN_MEMORY_DETAIL_ALIGN_HPP_INCLUDED #define FOONATHAN_MEMORY_DETAIL_ALIGN_HPP_INCLUDED +#include #include +#include "../config.hpp" + namespace foonathan { namespace memory { namespace detail { // returns the offset needed to align ptr for given alignment // alignment must be a power of two - inline std::size_t align_offset(void *ptr, std::size_t alignment) noexcept + inline std::size_t align_offset(void *ptr, std::size_t alignment) FOONATHAN_NOEXCEPT { auto address = reinterpret_cast(ptr); auto misaligned = address & (alignment - 1); // misaligned != 0 ? 
(alignment - misaligned) : 0 return misaligned * (alignment - misaligned); } + + // max_align_t is sometimes not in namespace std and sometimes not available at all + #if FOONATHAN_IMPL_HAS_MAX_ALIGN + namespace max_align + { + using namespace std; + using type = max_align_t; + } + + FOONATHAN_CONSTEXPR auto max_alignment = FOONATHAN_ALIGNOF(max_align::type); + #else + // assume long double has maximum alignment + FOONATHAN_CONSTEXPR auto max_alignment = FOONATHAN_ALIGNOF(long double); + #endif } // namespace detail }} // namespace foonathan::memory -#endif // FOONATHAN_MEMORY_DETAIL_ALIGN_HPP_INCLUDED \ No newline at end of file +#endif // FOONATHAN_MEMORY_DETAIL_ALIGN_HPP_INCLUDED diff --git a/detail/block_list.cpp b/detail/block_list.cpp index 1f4c2f91..6cc519b4 100644 --- a/detail/block_list.cpp +++ b/detail/block_list.cpp @@ -1,3 +1,7 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. 
+ #include "block_list.hpp" #include @@ -11,8 +15,8 @@ struct block_list_impl::node node *prev; std::size_t size; - node(node *prev, std::size_t size) noexcept - : prev(prev), size(size) {} + node(node *p, std::size_t s) FOONATHAN_NOEXCEPT + : prev(p), size(s) {} }; std::size_t block_list_impl::impl_offset() @@ -20,7 +24,7 @@ std::size_t block_list_impl::impl_offset() return sizeof(node); } -std::size_t block_list_impl::push(void* &memory, std::size_t size) noexcept +std::size_t block_list_impl::push(void* &memory, std::size_t size) FOONATHAN_NOEXCEPT { auto ptr = ::new(memory) node(head_, size); head_ = ptr; @@ -28,7 +32,7 @@ std::size_t block_list_impl::push(void* &memory, std::size_t size) noexcept return sizeof(node); } -block_info block_list_impl::push(block_list_impl &other) noexcept +block_info block_list_impl::push(block_list_impl &other) FOONATHAN_NOEXCEPT { assert(other.head_ && "stack underflow"); auto top = other.head_; @@ -38,7 +42,7 @@ block_info block_list_impl::push(block_list_impl &other) noexcept return {top, top->size - sizeof(node)}; } -block_info block_list_impl::pop() noexcept +block_info block_list_impl::pop() FOONATHAN_NOEXCEPT { assert(head_ && "stack underflow"); auto top = head_; diff --git a/detail/block_list.hpp b/detail/block_list.hpp index 1231a565..440b5435 100644 --- a/detail/block_list.hpp +++ b/detail/block_list.hpp @@ -1,9 +1,15 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. 
+ #ifndef FOONATHAN_MEMORY_DETAIL_BLOCK_LIST_HPP_INCLUDED #define FOONATHAN_MEMORY_DETAIL_BLOCK_LIST_HPP_INCLUDED #include #include +#include "align.hpp" + namespace foonathan { namespace memory { namespace detail @@ -14,8 +20,8 @@ namespace foonathan { namespace memory void *memory; std::size_t size; - block_info(void *memory, std::size_t size) noexcept - : memory(memory), size(size) {} + block_info(void *mem, std::size_t s) FOONATHAN_NOEXCEPT + : memory(mem), size(s) {} }; // simple intrusive list managing memory blocks @@ -26,33 +32,34 @@ namespace foonathan { namespace memory // the size needed for the implementation static std::size_t impl_offset(); - block_list_impl() noexcept = default; - block_list_impl(block_list_impl &&other) noexcept + block_list_impl() FOONATHAN_NOEXCEPT = default; + block_list_impl(block_list_impl &&other) FOONATHAN_NOEXCEPT : head_(other.head_) { other.head_ = nullptr; } - ~block_list_impl() noexcept = default; + ~block_list_impl() FOONATHAN_NOEXCEPT = default; - block_list_impl& operator=(block_list_impl &&other) noexcept + block_list_impl& operator=(block_list_impl &&other) FOONATHAN_NOEXCEPT { head_ = other.head_; other.head_ = nullptr; + return *this; } // inserts a new memory block, returns the size needed for the implementation - std::size_t push(void* &memory, std::size_t size) noexcept; + std::size_t push(void* &memory, std::size_t size) FOONATHAN_NOEXCEPT; // inserts the top memory block of another list, pops it from the other one // returns the memory block // its size is the usable memory size - block_info push(block_list_impl &other) noexcept; + block_info push(block_list_impl &other) FOONATHAN_NOEXCEPT; // pops the memory block at the top // its size is the original size passed to push - block_info pop() noexcept; + block_info pop() FOONATHAN_NOEXCEPT; - bool empty() const noexcept + bool empty() const FOONATHAN_NOEXCEPT { return head_ == nullptr; } @@ -68,22 +75,43 @@ namespace foonathan { namespace memory template class 
block_list : RawAllocator { - static constexpr auto growth_factor = 2u; + static FOONATHAN_CONSTEXPR auto growth_factor = 2u; public: // gives it an initial block size and allocates it // the blocks get large and large the more are needed block_list(std::size_t block_size, RawAllocator allocator) : RawAllocator(std::move(allocator)), cur_block_size_(block_size) {} - block_list(block_list &&) = default; - ~block_list() noexcept + + block_list(block_list &&other) FOONATHAN_NOEXCEPT + : used_(std::move(other.used_)), free_(std::move(other.free_)), + size_(other.size_), cur_block_size_(other.cur_block_size_) + { + other.size_ = other.cur_block_size_ = 0u; + } + + ~block_list() FOONATHAN_NOEXCEPT { shrink_to_fit(); + while (!used_.empty()) + { + auto block = used_.pop(); + get_allocator(). + deallocate_node(block.memory, block.size, detail::max_alignment); + } } - block_list& operator=(block_list &&) = default; + block_list& operator=(block_list &&other) FOONATHAN_NOEXCEPT + { + used_ = std::move(other.used_); + free_ = std::move(other.free_); + size_ = other.size_; + cur_block_size_ = other.cur_block_size_; + other.size_ = other.cur_block_size_ = 0u; + return *this; + } - RawAllocator& get_allocator() noexcept + RawAllocator& get_allocator() FOONATHAN_NOEXCEPT { return *this; } @@ -95,7 +123,7 @@ namespace foonathan { namespace memory if (free_.empty()) { auto memory = get_allocator(). 
- allocate_node(cur_block_size_, alignof(std::max_align_t)); + allocate_node(cur_block_size_, detail::max_alignment); ++size_; auto size = cur_block_size_ - used_.push(memory, cur_block_size_); cur_block_size_ *= growth_factor; @@ -108,30 +136,30 @@ namespace foonathan { namespace memory // deallocates the last allocate block // does not free memory, caches the block for future use - void deallocate() noexcept + void deallocate() FOONATHAN_NOEXCEPT { --size_; free_.push(used_); } // deallocates all unused cached blocks - void shrink_to_fit() noexcept + void shrink_to_fit() FOONATHAN_NOEXCEPT { while (!free_.empty()) { auto block = free_.pop(); get_allocator(). - deallocate_node(block.memory, block.size, alignof(std::max_align_t)); + deallocate_node(block.memory, block.size, detail::max_alignment); } } // returns the next block size - std::size_t next_block_size() const noexcept + std::size_t next_block_size() const FOONATHAN_NOEXCEPT { return cur_block_size_ - block_list_impl::impl_offset(); } - std::size_t size() const noexcept + std::size_t size() const FOONATHAN_NOEXCEPT { return size_; } diff --git a/detail/free_list.cpp b/detail/free_list.cpp index b1849bc5..264b654b 100644 --- a/detail/free_list.cpp +++ b/detail/free_list.cpp @@ -1,7 +1,13 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. 
+ #include "free_list.hpp" +#include #include #include +#include #include #include @@ -11,50 +17,58 @@ using namespace detail; namespace { // pre: ptr - char*& next(void* ptr) noexcept + char* get_next(void* ptr) FOONATHAN_NOEXCEPT + { + char* result = nullptr; + std::memcpy(&result, ptr, sizeof(char*)); + return result; + } + + // pre: ptr + void set_next(void *ptr, char *next) FOONATHAN_NOEXCEPT { - return *static_cast(ptr); + std::memcpy(ptr, &next, sizeof(char*)); } // pre: mem, el_size > sizeof(mem), size >= el_size - void* build_list(void* &mem, std::size_t el_size, std::size_t size) noexcept + void* build_list(void* &mem, std::size_t el_size, std::size_t size) FOONATHAN_NOEXCEPT { auto no_blocks = size / el_size; auto ptr = static_cast(mem); for (std::size_t i = 0u; i != no_blocks - 1; ++i, ptr += el_size) - next(ptr) = ptr + el_size; + set_next(ptr, ptr + el_size); return ptr; } // pre: list, mem - std::pair find_position(void *list, void *mem) noexcept + std::pair find_position(void *list, void *mem) FOONATHAN_NOEXCEPT { auto greater = std::greater(); auto prev = static_cast(list); if (greater(prev, static_cast(mem))) return std::make_pair(nullptr, list); - auto ptr = next(prev); + auto ptr = get_next(prev); while (ptr) { if (greater(ptr, static_cast(mem))) break; prev = ptr; - ptr = next(ptr); + ptr = get_next(ptr); } return std::make_pair(prev, ptr); } - // pre: beg - bool check_n(char* &cur, std::size_t n, std::size_t el_size) noexcept + // pre: cur + bool check_n(char* &cur, std::size_t n, std::size_t el_size) FOONATHAN_NOEXCEPT { --n; // we already have one (cur) if (n == 0u) return true; for (; cur; cur += el_size) { - if (next(cur) == cur + el_size) + if (get_next(cur) == cur + el_size) { if (--n == 0) break; @@ -62,37 +76,37 @@ namespace else return false; } - // next(cur) is the last element of the array - cur = next(cur); + // get_next(cur) is the last element of the array + cur = get_next(cur); return true; } } -constexpr std::size_t 
free_memory_list::min_element_size; -constexpr std::size_t free_memory_list::min_element_alignment; +FOONATHAN_CONSTEXPR std::size_t free_memory_list::min_element_size; +FOONATHAN_CONSTEXPR std::size_t free_memory_list::min_element_alignment; -free_memory_list::free_memory_list(std::size_t el_size) noexcept +free_memory_list::free_memory_list(std::size_t el_size) FOONATHAN_NOEXCEPT : first_(nullptr), el_size_(std::max(min_element_size, el_size)), capacity_(0u) {} free_memory_list::free_memory_list(std::size_t el_size, - void *mem, std::size_t size) noexcept + void *mem, std::size_t size) FOONATHAN_NOEXCEPT : free_memory_list(el_size) { insert(mem, size); } -void free_memory_list::insert(void *mem, std::size_t size) noexcept +void free_memory_list::insert(void *mem, std::size_t size) FOONATHAN_NOEXCEPT { - capacity_ += size; + capacity_ += size / el_size_; auto last = build_list(mem, el_size_, size); - next(last) = first_; + set_next(last, first_); first_ = static_cast(mem); } -void free_memory_list::insert_ordered(void *mem, std::size_t size) noexcept +void free_memory_list::insert_ordered(void *mem, std::size_t size) FOONATHAN_NOEXCEPT { - capacity_ += size; + capacity_ += size / el_size_; if (empty()) return insert(mem, size); @@ -102,53 +116,62 @@ void free_memory_list::insert_ordered(void *mem, std::size_t size) noexcept } void free_memory_list::insert_between(void *pre, void *after, - void *mem, std::size_t size) noexcept + void *mem, std::size_t size) FOONATHAN_NOEXCEPT { auto last = build_list(mem, el_size_, size); if (pre) - next(pre) = static_cast(mem); + set_next(pre, static_cast(mem)); else first_ = static_cast(mem); - next(last) = static_cast(after); + set_next(last, static_cast(after)); } -void* free_memory_list::allocate() noexcept +void* free_memory_list::allocate() FOONATHAN_NOEXCEPT { - capacity_ -= el_size_; + --capacity_; auto block = first_; - first_ = next(first_); + first_ = get_next(first_); return block; } -void* 
free_memory_list::allocate(std::size_t n) noexcept +void* free_memory_list::allocate(std::size_t n) FOONATHAN_NOEXCEPT { - capacity_ -= n * el_size_; - for(auto cur = first_; cur; cur = next(cur)) + capacity_ -= n; + for(auto cur = first_; cur; cur = get_next(cur)) { auto start = cur; if (check_n(cur, n, el_size_)) { // found n continuos nodes // cur is the last element, next(cur) is the next free node - first_ = next(cur); + first_ = get_next(cur); return start; } } return nullptr; } -void free_memory_list::deallocate(void *ptr) noexcept +void free_memory_list::deallocate(void *ptr) FOONATHAN_NOEXCEPT { - capacity_ += el_size_; - next(ptr) = first_; + ++capacity_; + set_next(ptr, first_); first_ = static_cast(ptr); } -void free_memory_list::deallocate_ordered(void *ptr) noexcept +void free_memory_list::deallocate_ordered(void *ptr) FOONATHAN_NOEXCEPT { - capacity_ += el_size_; + ++capacity_; auto pos = find_position(first_, ptr); insert_between(pos.first, pos.second, ptr, el_size_); } + +std::size_t free_memory_list::calc_block_count(std::size_t pool_element_size, + std::size_t count, std::size_t node_size) FOONATHAN_NOEXCEPT +{ + assert(node_size <= pool_element_size); + auto ratio = pool_element_size / node_size; + auto rest = count % ratio; + return count / ratio + (rest ? 1 : 0); +} diff --git a/detail/free_list.hpp b/detail/free_list.hpp index 23baa7b9..d90f4a30 100644 --- a/detail/free_list.hpp +++ b/detail/free_list.hpp @@ -1,11 +1,16 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. 
+ #ifndef FOONATHAN_MEMORY_DETAILL_FREE_LIST_HPP_INCLUDED #define FOONATHAN_MEMORY_DETAILL_FREE_LIST_HPP_INCLUDED #include +#include "../config.hpp" + namespace foonathan { namespace memory { - /// \cond impl namespace detail { // manages free memory blocks for a pool @@ -13,76 +18,81 @@ namespace foonathan { namespace memory // there are two versions for the functions: ordered and non ordered // non ordered is faster, because it does not keep the ist sorted // ordered allows arrays because multiple free blocks are stored after each other + // note: type must be trivially destructible! class free_memory_list { public: // minimum element size - static constexpr auto min_element_size = sizeof(char*); + static FOONATHAN_CONSTEXPR auto min_element_size = sizeof(char*); // alignment - static constexpr auto min_element_alignment = alignof(char*); + static FOONATHAN_CONSTEXPR auto min_element_alignment = FOONATHAN_ALIGNOF(char*); //=== constructor ===// - free_memory_list(std::size_t el_size) noexcept; + free_memory_list(std::size_t el_size) FOONATHAN_NOEXCEPT; // does not own memory! free_memory_list(std::size_t el_size, - void *mem, std::size_t size) noexcept; + void *mem, std::size_t size) FOONATHAN_NOEXCEPT; //=== insert/allocation/deallocation ===// // inserts a new memory block, by splitting it up and setting the links // does not own memory! 
// pre: size != 0 - void insert(void *mem, std::size_t size) noexcept; - void insert_ordered(void *mem, std::size_t size) noexcept; + void insert(void *mem, std::size_t size) FOONATHAN_NOEXCEPT; + void insert_ordered(void *mem, std::size_t size) FOONATHAN_NOEXCEPT; // returns single block from the list // pre: !empty() - void* allocate() noexcept; + void* allocate() FOONATHAN_NOEXCEPT; // returns the start to multiple blocks after each other // pre: !empty() // can return nullptr if no block found // won't necessarily work if non-ordered functions are called - void* allocate(std::size_t n) noexcept; + void* allocate(std::size_t n) FOONATHAN_NOEXCEPT; - void deallocate(void *ptr) noexcept; - void deallocate(void *ptr, std::size_t n) noexcept + void deallocate(void *ptr) FOONATHAN_NOEXCEPT; + void deallocate(void *ptr, std::size_t n) FOONATHAN_NOEXCEPT { - insert(ptr, n * element_size()); + insert(ptr, n * node_size()); } - void deallocate_ordered(void *ptr) noexcept; - void deallocate_ordered(void *ptr, std::size_t n) noexcept + void deallocate_ordered(void *ptr) FOONATHAN_NOEXCEPT; + void deallocate_ordered(void *ptr, std::size_t n) FOONATHAN_NOEXCEPT { - insert_ordered(ptr, n * element_size()); + insert_ordered(ptr, n * node_size()); } //=== getter ===// - std::size_t element_size() const noexcept + std::size_t node_size() const FOONATHAN_NOEXCEPT { return el_size_; } - // number of bytes, not elements - std::size_t capacity() const noexcept + // number of nodes remaining + std::size_t capacity() const FOONATHAN_NOEXCEPT { return capacity_; } - bool empty() const noexcept + bool empty() const FOONATHAN_NOEXCEPT { return !first_; } + + // calculates required block count for an array of smaller elements + // pre: node_size <= pool_element_size + static std::size_t calc_block_count(std::size_t pool_element_size, + std::size_t count, std::size_t node_size) FOONATHAN_NOEXCEPT; private: void insert_between(void *pre, void *after, - void *mem, std::size_t size) noexcept; 
+ void *mem, std::size_t size) FOONATHAN_NOEXCEPT; char *first_; std::size_t el_size_, capacity_; }; } // namespace detail - /// \endcond }} // namespace foonathan::memory #endif // FOONATHAN_MEMORY_DETAILL_FREE_LIST_HPP_INCLUDED diff --git a/detail/memory_stack.hpp b/detail/memory_stack.hpp index 80c74e67..0fd5b4f2 100644 --- a/detail/memory_stack.hpp +++ b/detail/memory_stack.hpp @@ -1,3 +1,7 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. + #ifndef FOONATHAN_MEMORY_DETAIL_MEMORY_STACK_HPP_INCLUDED #define FOONATHAN_MEMORY_DETAIL_MEMORY_STACK_HPP_INCLUDED @@ -15,20 +19,20 @@ namespace foonathan { namespace memory { public: // gives it a memory block - fixed_memory_stack(void *memory, std::size_t size) noexcept + fixed_memory_stack(void *memory, std::size_t size) FOONATHAN_NOEXCEPT : cur_(static_cast(memory)), end_(cur_ + size) {} - fixed_memory_stack(block_info info) noexcept + fixed_memory_stack(block_info info) FOONATHAN_NOEXCEPT : fixed_memory_stack(info.memory, info.size) {} - fixed_memory_stack() noexcept + fixed_memory_stack() FOONATHAN_NOEXCEPT : fixed_memory_stack(nullptr, 0) {} // allocates memory by advancing the stack, returns nullptr if insufficient - void* allocate(std::size_t size, std::size_t alignment) noexcept + void* allocate(std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT { auto offset = align_offset(cur_, alignment); - if (offset + size > end_ - cur_) + if (std::ptrdiff_t(offset + size) > end_ - cur_) return nullptr; cur_ += offset; auto memory = cur_; @@ -37,13 +41,13 @@ namespace foonathan { namespace memory } // returns the current top - char* top() const noexcept + char* top() const FOONATHAN_NOEXCEPT { return cur_; } // returns the end of the stack - const char* end() const noexcept + const char* end() const FOONATHAN_NOEXCEPT { return end_; } diff --git a/detail/small_free_list.cpp 
b/detail/small_free_list.cpp new file mode 100644 index 00000000..d022a45c --- /dev/null +++ b/detail/small_free_list.cpp @@ -0,0 +1,181 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. + +#include "small_free_list.hpp" + +#include +#include +#include + +#include "align.hpp" + +using namespace foonathan::memory; +using namespace detail; + +namespace +{ + static FOONATHAN_CONSTEXPR auto alignment_div = sizeof(chunk) / detail::max_alignment; + static FOONATHAN_CONSTEXPR auto alignment_mod = sizeof(chunk) % detail::max_alignment; + // offset from chunk to actual list + static FOONATHAN_CONSTEXPR auto chunk_memory_offset = alignment_mod == 0u ? sizeof(chunk) + : (alignment_div + 1) * detail::max_alignment; + // maximum nodes per chunk + static FOONATHAN_CONSTEXPR auto chunk_max_nodes = std::numeric_limits::max(); + + // returns the memory of the actual free list of a chunk + unsigned char* list_memory(void *c) FOONATHAN_NOEXCEPT + { + return static_cast(c) + chunk_memory_offset; + } + + // creates a chunk at mem + // mem must have at least the size chunk_memory_offset + no_nodes * node_size + chunk* create_chunk(void *mem, std::size_t node_size, unsigned char no_nodes) FOONATHAN_NOEXCEPT + { + auto c = ::new(mem) chunk; + c->first_node = 0; + c->no_nodes = no_nodes; + c->capacity = no_nodes; + auto p = list_memory(c); + for (unsigned char i = 0u; i != no_nodes; p += node_size) + *p = ++i; + return c; + } + + // inserts a chunk into the chunk list + // pushs it at the back of the list + void insert_chunk(chunk &list, chunk *c) FOONATHAN_NOEXCEPT + { + c->prev = list.prev; + c->next = &list; + list.prev = c; + if (list.next == &list) + list.next = c; + } + + // whether or not a pointer can be from a certain chunk + bool from_chunk(chunk *c, std::size_t node_size, void *mem) FOONATHAN_NOEXCEPT + { + // comparision not strictly legal, but works + return 
list_memory(c) <= mem + && mem < list_memory(c) + node_size * c->no_nodes; + } +} + +FOONATHAN_CONSTEXPR std::size_t small_free_memory_list::min_element_size; +FOONATHAN_CONSTEXPR std::size_t small_free_memory_list::min_element_alignment; + +small_free_memory_list::small_free_memory_list(std::size_t node_size) FOONATHAN_NOEXCEPT +: alloc_chunk_(&dummy_chunk_), dealloc_chunk_(&dummy_chunk_), unused_chunk_(nullptr), + node_size_(node_size), capacity_(0u) {} + + small_free_memory_list::small_free_memory_list(std::size_t node_size, + void *mem, std::size_t size) FOONATHAN_NOEXCEPT +: small_free_memory_list(node_size) +{ + insert(mem, size); +} + +void small_free_memory_list::insert(void *memory, std::size_t size) FOONATHAN_NOEXCEPT +{ + auto chunk_unit = chunk_memory_offset + node_size_ * chunk_max_nodes; + auto no_chunks = size / chunk_unit; + auto mem = static_cast(memory); + for (std::size_t i = 0; i != no_chunks; ++i) + { + auto c = create_chunk(mem, node_size_, chunk_max_nodes); + c->next = nullptr; + c->prev = unused_chunk_; + unused_chunk_ = c; + mem += chunk_unit; + } + auto remaining = size % chunk_unit - chunk_memory_offset; + if (remaining > node_size_) + { + auto c = create_chunk(mem, node_size_, static_cast(remaining / node_size_)); + c->next = nullptr; + c->prev = unused_chunk_; + unused_chunk_ = c; + } + auto inserted_memory = no_chunks * chunk_max_nodes + remaining / node_size_; + assert(inserted_memory > 0u && "too small memory size"); + capacity_ += inserted_memory; +} + +void* small_free_memory_list::allocate() FOONATHAN_NOEXCEPT +{ + if (alloc_chunk_->capacity == 0u) + find_chunk(1); + assert(alloc_chunk_->capacity != 0u); + auto memory = list_memory(alloc_chunk_) + alloc_chunk_->first_node * node_size_; + alloc_chunk_->first_node = *memory; + --alloc_chunk_->capacity; + --capacity_; + return memory; +} + +void small_free_memory_list::deallocate(void *node) FOONATHAN_NOEXCEPT +{ + if (!from_chunk(dealloc_chunk_, node_size_, node)) + { + auto next = 
dealloc_chunk_->next, prev = dealloc_chunk_->prev; + while (next != dealloc_chunk_ || prev != dealloc_chunk_) + { + if (from_chunk(next, node_size_, node)) + { + dealloc_chunk_ = next; + break; + } + else if (from_chunk(prev, node_size_, node)) + { + dealloc_chunk_ = prev; + break; + } + next = next->next; + prev = prev->prev; + } + } + assert(from_chunk(dealloc_chunk_, node_size_, node)); + auto node_mem = static_cast(node); + *node_mem = dealloc_chunk_->first_node; + auto offset = static_cast(node_mem - list_memory(dealloc_chunk_)); + assert(offset % node_size_ == 0); + dealloc_chunk_->first_node = static_cast(offset / node_size_); + ++dealloc_chunk_->capacity; + ++capacity_; +} + +bool small_free_memory_list::find_chunk(std::size_t n) FOONATHAN_NOEXCEPT +{ + assert(capacity_ >= n && n <= chunk_max_nodes); + if (alloc_chunk_->capacity >= n) + return true; + else if (unused_chunk_) + { + auto c = unused_chunk_; + unused_chunk_ = unused_chunk_->prev; + insert_chunk(dummy_chunk_, c); + alloc_chunk_ = c; + return true; + } + + auto next = dealloc_chunk_->next; + auto prev = dealloc_chunk_->prev; + while (next != dealloc_chunk_ || prev != dealloc_chunk_) + { + if (next->capacity >= n) + { + alloc_chunk_ = next; + return true; + } + else if (prev->capacity >= n) + { + alloc_chunk_ = prev; + return true; + } + next = next->next; + prev = prev->prev; + } + return true; +} diff --git a/detail/small_free_list.hpp b/detail/small_free_list.hpp new file mode 100644 index 00000000..e2712801 --- /dev/null +++ b/detail/small_free_list.hpp @@ -0,0 +1,88 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. 
+ +#ifndef FOONATHAN_MEMORY_DETAIL_SMALL_FREE_LIST_HPP_INCLUDED +#define FOONATHAN_MEMORY_DETAIL_SMALL_FREE_LIST_HPP_INCLUDED + +#include + +#include "../config.hpp" + +namespace foonathan { namespace memory +{ + namespace detail + { + // a chunk in the free list + struct chunk + { + chunk *next = this, *prev = this; + unsigned char first_node = 0u, capacity = 0u, no_nodes = 0u; + }; + + // the same as free_memory_list but optimized for small node sizes + // it is slower and does not support arrays + // but has very small overhead + class small_free_memory_list + { + public: + // minimum element size + static FOONATHAN_CONSTEXPR std::size_t min_element_size = 1; + // alignment + static FOONATHAN_CONSTEXPR std::size_t min_element_alignment = 1; + + //=== constructor ===// + small_free_memory_list(std::size_t node_size) FOONATHAN_NOEXCEPT; + + // does not own memory! + small_free_memory_list(std::size_t node_size, + void *mem, std::size_t size) FOONATHAN_NOEXCEPT; + + //=== insert/alloc/dealloc ===// + // inserts new memory of given size into the free list + void insert(void *mem, std::size_t size) FOONATHAN_NOEXCEPT; + + // allocates a node big enough for the node size + // pre: !empty() + void* allocate() FOONATHAN_NOEXCEPT; + + // deallocates the node previously allocated via allocate() + void deallocate(void *node) FOONATHAN_NOEXCEPT; + + // hint for allocate() to be prepared to allocate n nodes + // it searches for a chunk that has n nodes free + // returns false, if there is none like that + // never fails for n == 1 if not empty() + // pre: capacity() >= n * node_size() + bool find_chunk(std::size_t n) FOONATHAN_NOEXCEPT; + + //=== getter ===// + std::size_t node_size() const FOONATHAN_NOEXCEPT + { + return node_size_; + } + + // number of nodes remaining + std::size_t capacity() const FOONATHAN_NOEXCEPT + { + return capacity_; + } + + bool empty() const FOONATHAN_NOEXCEPT + { + return capacity_ == 0u; + } + + private: + // dummy_chunk_ is head/tail for used 
chunk list + chunk dummy_chunk_; + // alloc_chunk_ points to the chunk used for allocation + // dealloc_chunk_ points to the chunk last used for deallocation + // unused_chunk_ points to the head of a seperate list consisting of unused chunks + chunk *alloc_chunk_, *dealloc_chunk_, *unused_chunk_; + std::size_t node_size_, capacity_; + }; + } // namespace detail +}} // namespace foonathan::memory + +#endif // FOONATHAN_MEMORY_DETAIL_SMALL_FREE_LIST_HPP_INCLUDED diff --git a/example/allocator.cpp b/example/allocator.cpp index 564f9473..757faa04 100644 --- a/example/allocator.cpp +++ b/example/allocator.cpp @@ -12,7 +12,7 @@ using namespace foonathan; -// uses a RawAllocator or a class that has specialized the raw_allocator_traits +// uses a RawAllocator or a class that has specialized the allocator_traits template void use_allocator(RawAllocator &alloc) { @@ -24,7 +24,7 @@ void use_allocator(RawAllocator &alloc) std::vector a(alloc); // add some elements std::clog << "vector creation\n"; - for (auto i = 0u; i != 10u; ++i) + for (auto i = 0; i != 10; ++i) a.push_back(i); // remove the third one @@ -55,7 +55,7 @@ void use_allocator(RawAllocator &alloc) // insert and erase values std::clog << "insert/erase set\n"; - for (auto i = 0u; i != 10u; ++i) + for (auto i = 0; i != 10; ++i) set.insert(i); set.erase(2); set.erase(10); @@ -64,7 +64,7 @@ void use_allocator(RawAllocator &alloc) int main() { std::clog << std::unitbuf; - // a memory pool that supports arrays, each node is 32 bytes big, initially 4KB long + // a memory pool that supports arrays, each node is 32 bytes big, initially 4KiB long memory::memory_pool pool(32, 4096); { // allocate one such node @@ -87,24 +87,24 @@ int main() // tracker class that logs internal behavior of the allocator struct tracker { - void on_node_allocation(void *mem, std::size_t size, std::size_t) noexcept + void on_node_allocation(void *mem, std::size_t size, std::size_t) FOONATHAN_NOEXCEPT { std::clog << this << " node allocated: "; 
std::clog << mem << " (" << size << ") " << '\n'; } - void on_array_allocation(void *mem, std::size_t count, std::size_t size, std::size_t) noexcept + void on_array_allocation(void *mem, std::size_t count, std::size_t size, std::size_t) FOONATHAN_NOEXCEPT { std::clog << this << " array allocated: "; std::clog << mem << " (" << count << " * " << size << ") " << '\n'; } - void on_node_deallocation(void *ptr, std::size_t, std::size_t) noexcept + void on_node_deallocation(void *ptr, std::size_t, std::size_t) FOONATHAN_NOEXCEPT { std::clog << this << " node deallocated: " << ptr << " \n"; } - void on_array_deallocation(void *ptr, std::size_t, std::size_t, std::size_t) noexcept + void on_array_deallocation(void *ptr, std::size_t, std::size_t, std::size_t) FOONATHAN_NOEXCEPT { std::clog << this << " array deallocated: " << ptr << " \n"; } diff --git a/example/smart_ptr.cpp b/example/smart_ptr.cpp index 33977114..179606c1 100644 --- a/example/smart_ptr.cpp +++ b/example/smart_ptr.cpp @@ -17,7 +17,7 @@ void func(const std::shared_ptr &ptr) int main() { - // create a memory stack initially 4KB big + // create a memory stack initially 4KiB big memory::memory_stack<> stack(4096); // create a shared pointer @@ -27,7 +27,7 @@ int main() // create marker for stack unwinding auto m = stack.top(); - for (auto i = 0u; i != 10; ++i) + for (auto i = 0; i != 10; ++i) { // free all memory from previous iteration stack.unwind(m); diff --git a/example/temporary.cpp b/example/temporary.cpp new file mode 100644 index 00000000..79932bb7 --- /dev/null +++ b/example/temporary.cpp @@ -0,0 +1,66 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. 
+ +#include +#include +#include + +#include "../smart_ptr.hpp" // raw_allocate_unique +#include "../temporary_allocator.hpp" // temporary_allocator & co + +using namespace foonathan; + +template +void merge_sort(RAIter begin, RAIter end); + +int main() +{ + // creates a temporary allocator, it allows fast allocation for temporary memory + // it is similar to alloca but uses a memory_stack instead of the real stack + // each thread has its own internal memory_stack + // it is created on the first call to this function where you can optionally specify the stack size + // it is destroyed when the thread ends + auto alloc = memory::make_temporary_allocator(); + + // directly allocate memory + // it will be freed when alloc goes out of scope + std::cout << "Allocated: " << alloc.allocate(sizeof(int), FOONATHAN_ALIGNOF(int)) << '\n'; + + // create temporary array of 5 elements + auto array = memory::raw_allocate_unique(alloc, 5); + array[0] = 4; + array[1] = 2; + array[2] = 5; + array[3] = 1; + array[4] = 3; + + merge_sort(array.get(), array.get() + 5); + + std::cout << "Sorted: "; + for (auto i = 0u; i != 5; ++i) + std::cout << array[i] << ' '; + std::cout << '\n'; +} + +// naive implementation of merge_sort using temporary memory allocator +template +void merge_sort(RAIter begin, RAIter end) +{ + using value_type = typename std::iterator_traits::value_type; + using vector_t = std::vector>; + + if (end - begin <= 1) + return; + auto mid = begin + (end - begin) / 2; + + // create the vectors using temporary_allocator for fast memory allocation + // since the internal stack is already created in main() this does not invovle heap allocation + auto alloc = memory::make_temporary_allocator(); + vector_t first(begin, mid, alloc), second(mid, end, alloc); + + merge_sort(first.begin(), first.end()); + merge_sort(second.begin(), second.end()); + std::merge(first.begin(), first.end(), second.begin(), second.end(), begin); +} diff --git a/heap_allocator.cpp b/heap_allocator.cpp 
index 8f0d08df..8df13af2 100644 --- a/heap_allocator.cpp +++ b/heap_allocator.cpp @@ -8,6 +8,8 @@ #include #include +#include "config.hpp" + using namespace foonathan::memory; void* heap_allocator::allocate_node(std::size_t size, std::size_t) @@ -17,7 +19,12 @@ void* heap_allocator::allocate_node(std::size_t size, std::size_t) auto mem = std::malloc(size); if (mem) return mem; + #if FOONATHAN_IMPL_HAS_GET_NEW_HANDLER auto handler = std::get_new_handler(); + #else + auto handler = std::set_new_handler(nullptr); + std::set_new_handler(handler); + #endif if (!handler) throw std::bad_alloc(); handler(); @@ -25,8 +32,7 @@ void* heap_allocator::allocate_node(std::size_t size, std::size_t) assert(false); } -void heap_allocator::deallocate_node(void *ptr, - std::size_t size, std::size_t) noexcept +void heap_allocator::deallocate_node(void *ptr, std::size_t, std::size_t) FOONATHAN_NOEXCEPT { std::free(ptr); } diff --git a/heap_allocator.hpp b/heap_allocator.hpp index 205bab1f..ebbe324e 100644 --- a/heap_allocator.hpp +++ b/heap_allocator.hpp @@ -29,7 +29,7 @@ namespace foonathan { namespace memory void* allocate_node(std::size_t size, std::size_t alignment); /// \brief Deallocates raw memory. 
- void deallocate_node(void *ptr, std::size_t size, std::size_t alignment) noexcept; + void deallocate_node(void *ptr, std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT; }; }} // namespace foonathan::memory diff --git a/new_allocator.cpp b/new_allocator.cpp index 159831be..581d3886 100644 --- a/new_allocator.cpp +++ b/new_allocator.cpp @@ -11,7 +11,7 @@ void* new_allocator::allocate_node(std::size_t size, std::size_t) return ::operator new(size); } -void new_allocator::deallocate_node(void* node, std::size_t size, std::size_t) noexcept +void new_allocator::deallocate_node(void* node, std::size_t, std::size_t) FOONATHAN_NOEXCEPT { ::operator delete(node); } diff --git a/new_allocator.hpp b/new_allocator.hpp index df65aa5e..15fea7ae 100644 --- a/new_allocator.hpp +++ b/new_allocator.hpp @@ -27,7 +27,7 @@ namespace foonathan { namespace memory void* allocate_node(std::size_t size, std::size_t alignment); /// \brief Deallocates memory using \c ::operator \c delete. - void deallocate_node(void *node, std::size_t size, std::size_t alignment) noexcept; + void deallocate_node(void *node, std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT; }; }} // namespace foonathan::memory diff --git a/pool_allocator.hpp b/pool_allocator.hpp index 1c00e5a6..32c04cb8 100644 --- a/pool_allocator.hpp +++ b/pool_allocator.hpp @@ -8,6 +8,7 @@ /// \file /// \brief A pool allocator. +#include #include #include #include @@ -15,8 +16,9 @@ #include "detail/align.hpp" #include "detail/block_list.hpp" #include "detail/free_list.hpp" +#include "detail/small_free_list.hpp" #include "allocator_traits.hpp" -#include "heap_allocator.hpp" +#include "default_allocator.hpp" #include "pool_type.hpp" namespace foonathan { namespace memory @@ -26,31 +28,38 @@ namespace foonathan { namespace memory /// It manages nodes of fixed size. /// Allocating and deallocating such a node is really fast, /// but each has the given size.
- /// There are two types: one that is faster, but does not support arrays, - /// one that is slightly slower but does. - /// Use the \ref node_pool or \ref array_pool type to select it.
+ /// There different types of pools: + /// * \ref node_pool: doesn't support array allocations. + /// * \ref array_pool: does support array allocations but slower. + /// * \ref small_node_pool: optimized for small objects, low memory overhead, but slower. + /// Does not support array allocations.
/// It is no \ref concept::RawAllocator, but the \ref allocator_traits are specialized for it.
/// It allocates big blocks from an implementation allocator. /// If their size is sufficient, allocations are fast. /// \ingroup memory - template + template class memory_pool { - static_assert(std::is_same::value || - std::is_same::value, + static_assert(std::is_same::value || + std::is_same::value || + std::is_same::value, "invalid tag type"); + using free_list = typename std::conditional< + std::is_same::value, + detail::small_free_memory_list, + detail::free_memory_list>::type; + public: using impl_allocator = RawAllocator; - /// \brief The type of the pool (\ref node_pool or \ref array_pool). - // implementation node: pool_type::value is true for arrays - using pool_type = NodeOrArray; + /// \brief The type of the pool (\re_tef node_pool, \ref array_pool, \ref small_node_pool). + using pool_type = PoolType; /// \brief The minimum node size due to implementation reasons. - static constexpr auto min_node_size = detail::free_memory_list::min_element_size; + static FOONATHAN_CONSTEXPR std::size_t min_node_size = free_list::min_element_size; /// \brief Gives it the size of the nodes inside the pool and start block size. - /// \detail The first memory block is allocated, the block size can change. + /// \details The first memory block is allocated, the block size can change. memory_pool(std::size_t node_size, std::size_t block_size, impl_allocator allocator = impl_allocator()) : block_list_(block_size, std::move(allocator)), @@ -60,79 +69,73 @@ namespace foonathan { namespace memory } /// \brief Allocates a single node from the pool. - /// \detail It is aligned for \c std::min(node_size(), alignof(std::max_align_t). + /// \details It is aligned for \c std::min(node_size(), alignof(std::max_align_t). void* allocate_node() { if (free_list_.empty()) allocate_block(); + assert(!free_list_.empty()); return free_list_.allocate(); } /// \brief Allocates an array from the pool. - /// \detail Returns \c n subsequent nodes.
+ /// \details Returns \c n subsequent nodes.
/// If not \ref array_pool, may fail, throwing \c std::bad_alloc. void* allocate_array(std::size_t n) { - void *mem = nullptr; - if (free_list_.empty()) - { + static_assert(std::is_same::value, + "does not support array allocations"); + auto empty = free_list_.empty(); + if (empty) allocate_block(); - mem = free_list_.allocate(n); - } - else + auto mem = free_list_.allocate(n); + if (!mem && !empty) // only one allocate_block() call { + allocate_block(); mem = free_list_.allocate(n); - if (!mem) - { - allocate_block(); - mem = free_list_.allocate(n); - } } assert(mem && "invalid array size"); return mem; } /// \brief Deallocates a single node from the pool. - void deallocate_node(void *ptr) noexcept + void deallocate_node(void *ptr) FOONATHAN_NOEXCEPT { - if (pool_type::value) - free_list_.deallocate_ordered(ptr); - else - free_list_.deallocate(ptr); + detail::deallocate(pool_type{}, free_list_, ptr); } /// \brief Deallocates an array of nodes from the pool. - void deallocate_array(void *ptr, std::size_t n) noexcept + void deallocate_array(void *ptr, std::size_t n) FOONATHAN_NOEXCEPT { - if (pool_type::value) - free_list_.deallocate_ordered(ptr, n); - else - free_list_.deallocate(ptr, n); + static_assert(std::is_same::value, + "does not support array allocations"); + detail::deallocate(pool_type{}, free_list_, ptr, n); } /// \brief Returns the size of each node in the pool. - std::size_t node_size() const noexcept + std::size_t node_size() const FOONATHAN_NOEXCEPT { - return free_list_.element_size(); + return free_list_.node_size(); } /// \brief Returns the capacity remaining in the current block. - /// \detail This is the pure byte size, divide it by \ref node_size() to get the number of bytes. - std::size_t capacity() const noexcept + /// \details This is the number of bytes remaining. + /// Divide it by the \ref node_size() to get the number of nodes. 
+ std::size_t capacity() const FOONATHAN_NOEXCEPT { - return free_list_.capacity(); + return free_list_.capacity() * node_size(); } /// \brief Returns the size of the next memory block. - /// \detail This is the new capacity after \ref capacity() is exhausted.
+ /// \details This is the new capacity after \ref capacity() is exhausted.
/// This is also the maximum array size. - std::size_t next_capacity() const noexcept + std::size_t next_capacity() const FOONATHAN_NOEXCEPT { - return block_list_.next_block_size(); + return block_list_.next_block_size() / node_size(); } /// \brief Returns the \ref impl_allocator. - impl_allocator& get_impl_allocator() noexcept + impl_allocator& get_impl_allocator() FOONATHAN_NOEXCEPT { return block_list_.get_allocator(); } @@ -141,38 +144,38 @@ namespace foonathan { namespace memory void allocate_block() { auto mem = block_list_.allocate(); - auto offset = detail::align_offset(mem.memory, alignof(std::max_align_t)); - mem.memory = static_cast(mem.memory) + offset; - if (pool_type::value) - free_list_.insert_ordered(mem.memory, mem.size); - else - free_list_.insert(mem.memory, mem.size); - capacity_ = mem.size; + auto offset = detail::align_offset(mem.memory, detail::max_alignment); + detail::insert(pool_type{}, free_list_, + static_cast(mem.memory) + offset, mem.size - offset); + capacity_ = mem.size - offset; } detail::block_list block_list_; - detail::free_memory_list free_list_; + free_list free_list_; std::size_t capacity_; }; + + template + FOONATHAN_CONSTEXPR std::size_t memory_pool::min_node_size; /// \brief Specialization of the \ref allocator_traits for a \ref memory_pool. - /// \detail This allows passing a pool directly as allocator to container types. + /// \details This allows passing a pool directly as allocator to container types. /// \ingroup memory - template - class allocator_traits> + template + class allocator_traits> { public: - using allocator_type = memory_pool; + using allocator_type = memory_pool; using is_stateful = std::true_type; /// @{ /// \brief Allocation functions forward to the pool allocation functions. - /// \detail Size and alignment of the nodes are ignored, since the pool handles it. + /// \details Size and alignment of the nodes are ignored, since the pool handles it. 
static void* allocate_node(allocator_type &state, std::size_t size, std::size_t alignment) { assert(size <= max_node_size(state) && "invalid node size"); - assert(alignment <= std::min(size, alignof(std::max_align_t)) && "invalid alignment"); + assert(alignment <= std::min(size, detail::max_alignment) && "invalid alignment"); return state.allocate_node(); } @@ -182,50 +185,71 @@ namespace foonathan { namespace memory assert(size <= max_node_size(state) && "invalid node size"); assert(alignment <= max_alignment(state) && "invalid alignment"); assert(count * size <= max_array_size(state) && "invalid array size"); - if (size == max_node_size(state)) - return state.allocate_array(count); - auto ratio = max_node_size(state) / size; - return state.allocate_array(count / ratio + 1); + return allocate_array(PoolType{}, state, count, size); } /// @} /// @{ /// \brief Deallocation functions forward to the pool deallocation functions. static void deallocate_node(allocator_type &state, - void *node, std::size_t, std::size_t) noexcept + void *node, std::size_t, std::size_t) FOONATHAN_NOEXCEPT { state.deallocate_node(node); } static void deallocate_array(allocator_type &state, - void *array, std::size_t count, std::size_t size, std::size_t) noexcept + void *array, std::size_t count, std::size_t size, std::size_t) FOONATHAN_NOEXCEPT { - if (size == max_node_size(state)) - state.deallocate_array(array, count); - else - { - auto ratio = max_node_size(state) / size; - state.deallocate_array(array, count / ratio + 1); - } + deallocate_array(PoolType{}, state, array, count, size); } /// @} /// \brief Maximum size of a node is the pool's node size. - static std::size_t max_node_size(const allocator_type &state) noexcept + static std::size_t max_node_size(const allocator_type &state) FOONATHAN_NOEXCEPT { return state.node_size(); } /// \brief Maximum size of an array is the capacity in the next block of the pool. 
- static std::size_t max_array_size(const allocator_type &state) noexcept + static std::size_t max_array_size(const allocator_type &state) FOONATHAN_NOEXCEPT { return state.next_capacity(); } /// \brief Maximum alignment is \c std::min(node_size(), alignof(std::max_align_t). - static std::size_t max_alignment(const allocator_type &state) noexcept + static std::size_t max_alignment(const allocator_type &state) FOONATHAN_NOEXCEPT + { + return std::min(state.node_size(), detail::max_alignment); + } + + private: + static void* allocate_array(std::false_type, allocator_type &, + std::size_t, std::size_t) + { + assert(!"array allocations not supported"); + return nullptr; + } + + static void* allocate_array(std::true_type, allocator_type &state, + std::size_t count, std::size_t size) + { + auto n = detail::free_memory_list::calc_block_count + (max_node_size(state), count, size); + return state.allocate_array(n); + } + + static void deallocate_array(std::false_type, allocator_type &, + void *, std::size_t, std::size_t) + { + assert(!"array allocations not supported"); + } + + static void deallocate_array(std::true_type, allocator_type &state, + void *array, std::size_t count, std::size_t size) { - return std::min(state.node_size(), alignof(std::max_align_t)); + auto n = detail::free_memory_list::calc_block_count + (max_node_size(state), count, size); + state.deallocate_array(array, n); } }; }} // namespace foonathan::memory diff --git a/pool_collection.cpp b/pool_collection.cpp index 36087aba..bbfdf709 100644 --- a/pool_collection.cpp +++ b/pool_collection.cpp @@ -15,7 +15,7 @@ using namespace detail; namespace { template - constexpr bool is_power_of_two(Integral no) noexcept + bool is_power_of_two(Integral no) FOONATHAN_NOEXCEPT { return no && (no & (no - 1)) == 0; } @@ -26,30 +26,30 @@ namespace // we have a builtin to count leading zeros, use it // subtract one if power of two, otherwise 0 // multiple overloads to support each size of std::size_t - constexpr 
std::size_t ilog2(unsigned int no) noexcept + std::size_t ilog2(unsigned int no) FOONATHAN_NOEXCEPT { - return sizeof(no) * CHAR_BIT - __builtin_clz(no) - is_power_of_two(no); + return sizeof(no) * CHAR_BIT - unsigned(__builtin_clz(no)) - unsigned(is_power_of_two(no)); } - constexpr std::size_t ilog2(unsigned long no) noexcept + std::size_t ilog2(unsigned long no) FOONATHAN_NOEXCEPT { - return sizeof(no) * CHAR_BIT - __builtin_clzl(no) - is_power_of_two(no); + return sizeof(no) * CHAR_BIT - unsigned(__builtin_clzl(no)) - unsigned(is_power_of_two(no)); } - constexpr std::size_t ilog2(unsigned long long no) noexcept + std::size_t ilog2(unsigned long long no) FOONATHAN_NOEXCEPT { - return sizeof(no) * CHAR_BIT - __builtin_clzll(no) - is_power_of_two(no); + return sizeof(no) * CHAR_BIT - unsigned(__builtin_clzll(no)) - unsigned(is_power_of_two(no)); } #elif FLT_RADIX == 2 // floating points exponent are for base 2, use ilogb to get the exponent // subtract one if power of two, otherwise zero - std::size_t ilog2(std::size_t no) noexcept + std::size_t ilog2(std::size_t no) FOONATHAN_NOEXCEPT { - return std::ilogb(no) - is_power_of_two(no); + return std::ilogb(no) - unsigned(is_power_of_two(no)); } #else // just ceil log2 - std::size_t ilog2(std::size_t no) noexcept + std::size_t ilog2(std::size_t no) FOONATHAN_NOEXCEPT { std::ceil(std::log2(no)); } @@ -59,30 +59,27 @@ namespace const auto min_node_size_log = ilog2(free_memory_list::min_element_size); } -static_assert(std::is_trivially_destructible::value, - "free_list_array currently does not call any destructors"); - free_list_array::free_list_array(fixed_memory_stack &stack, - std::size_t max_node_size) noexcept + std::size_t max_node_size) FOONATHAN_NOEXCEPT { assert(max_node_size >= free_memory_list::min_element_size && "too small max_node_size"); auto no_pools = ilog2(max_node_size) - min_node_size_log + 1; auto pools_size = no_pools * sizeof(free_memory_list); nodes_ = static_cast(stack.allocate(pools_size, - 
alignof(free_memory_list))); + FOONATHAN_ALIGNOF(free_memory_list))); arrays_ = static_cast(stack.allocate(pools_size, - alignof(free_memory_list))); + FOONATHAN_ALIGNOF(free_memory_list))); assert(nodes_ && arrays_ && "insufficient memory block size"); for (std::size_t i = 0u; i != no_pools; ++i) { - auto el_size = 1 << (i + min_node_size_log); + std::size_t el_size = 1u << (i + min_node_size_log); ::new(static_cast(nodes_ + i)) free_memory_list(el_size); ::new(static_cast(arrays_ + i)) free_memory_list(el_size); } } -free_memory_list& free_list_array::get_node(std::size_t node_size) noexcept +free_memory_list& free_list_array::get_node(std::size_t node_size) FOONATHAN_NOEXCEPT { auto i = ilog2(node_size); if (i < min_node_size_log) @@ -90,7 +87,7 @@ free_memory_list& free_list_array::get_node(std::size_t node_size) noexcept return nodes_[i - min_node_size_log]; } -const free_memory_list& free_list_array::get_node(std::size_t node_size) const noexcept +const free_memory_list& free_list_array::get_node(std::size_t node_size) const FOONATHAN_NOEXCEPT { auto i = ilog2(node_size); if (i < min_node_size_log) @@ -98,7 +95,7 @@ const free_memory_list& free_list_array::get_node(std::size_t node_size) const n return nodes_[i - min_node_size_log]; } -free_memory_list& free_list_array::get_array(std::size_t node_size) noexcept +free_memory_list& free_list_array::get_array(std::size_t node_size) FOONATHAN_NOEXCEPT { auto i = ilog2(node_size); if (i < min_node_size_log) @@ -106,7 +103,7 @@ free_memory_list& free_list_array::get_array(std::size_t node_size) noexcept return arrays_[i - min_node_size_log]; } -const free_memory_list& free_list_array::get_array(std::size_t node_size) const noexcept +const free_memory_list& free_list_array::get_array(std::size_t node_size) const FOONATHAN_NOEXCEPT { auto i = ilog2(node_size); if (i < min_node_size_log) @@ -114,7 +111,7 @@ const free_memory_list& free_list_array::get_array(std::size_t node_size) const return arrays_[i]; } 
-std::size_t free_list_array::max_node_size() const noexcept +std::size_t free_list_array::max_node_size() const FOONATHAN_NOEXCEPT { return 1 << (size() + min_node_size_log); } diff --git a/pool_collection.hpp b/pool_collection.hpp index 03859e8b..e7aed626 100644 --- a/pool_collection.hpp +++ b/pool_collection.hpp @@ -15,7 +15,7 @@ #include "detail/free_list.hpp" #include "detail/memory_stack.hpp" #include "allocator_traits.hpp" -#include "heap_allocator.hpp" +#include "default_allocator.hpp" #include "pool_type.hpp" namespace foonathan { namespace memory @@ -26,21 +26,21 @@ namespace foonathan { namespace memory { public: free_list_array(fixed_memory_stack &stack, - std::size_t max_node_size) noexcept; + std::size_t max_node_size) FOONATHAN_NOEXCEPT; - free_memory_list& get_node(std::size_t node_size) noexcept; - const free_memory_list& get_node(std::size_t node_size) const noexcept; + free_memory_list& get_node(std::size_t node_size) FOONATHAN_NOEXCEPT; + const free_memory_list& get_node(std::size_t node_size) const FOONATHAN_NOEXCEPT; - free_memory_list& get_array(std::size_t node_size) noexcept; - const free_memory_list& get_array(std::size_t node_size) const noexcept; + free_memory_list& get_array(std::size_t node_size) FOONATHAN_NOEXCEPT; + const free_memory_list& get_array(std::size_t node_size) const FOONATHAN_NOEXCEPT; // no of pools - std::size_t size() const noexcept + std::size_t size() const FOONATHAN_NOEXCEPT { - return arrays_ - nodes_; + return std::size_t(arrays_ - nodes_); } - std::size_t max_node_size() const noexcept; + std::size_t max_node_size() const FOONATHAN_NOEXCEPT; private: free_memory_list *nodes_, *arrays_; @@ -49,17 +49,17 @@ namespace foonathan { namespace memory } // namespace detail /// \brief Manages multiple memory pools, each with a fixed size. - /// \detail This allows allocating of nodes of various sizes.
+ /// \details This allows allocating nodes of various sizes.
/// Otherwise behaves the same as \ref memory_pool. /// \ingroup memory - template + template class memory_pool_collection { public: using impl_allocator = RawAllocator; /// \brief Creates a new pool collection with given max node size the memory block size. - /// \detail It can handle node sizes up to a given size.
+ /// \details It can handle node sizes up to a given maximum.
/// The first memory block is allocated, the block size can change. memory_pool_collection(std::size_t max_node_size, std::size_t block_size, impl_allocator alloc = impl_allocator()) @@ -69,7 +69,7 @@ namespace foonathan { namespace memory {} /// \brief Allocates a node of given size. - /// \detail It selects the smallest node pool with sufficient size, + /// \details It selects the smallest node pool with sufficient size, /// the size must be smaller than the maximum node size. void* allocate_node(std::size_t node_size) { @@ -80,32 +80,37 @@ namespace foonathan { namespace memory } /// \brief Allocates an array of given node size and number of elements. - /// \detail It selects the smallest node pool with sufficient size, + /// \details It selects the smallest node pool with sufficient size, /// the size must be smaller than the maximum node size. void* allocate_array(std::size_t count, std::size_t node_size) { auto& pool = pools_.get_array(node_size); if (pool.empty()) reserve_impl(pool, def_capacity(), &detail::free_memory_list::insert_ordered); - return pool.allocate(count); + auto n = detail::free_memory_list::calc_block_count + (pool.node_size(), count, node_size); + return pool.allocate(n); } /// @{ /// \brief Deallocates the memory into the appropriate pool. 
- void deallocate_node(void *memory, std::size_t node_size) noexcept + void deallocate_node(void *memory, std::size_t node_size) FOONATHAN_NOEXCEPT { pools_.get_node(node_size).deallocate(memory); } - void deallocate_array(void *memory, std::size_t count, std::size_t node_size) noexcept + void deallocate_array(void *memory, std::size_t count, std::size_t node_size) FOONATHAN_NOEXCEPT { - pools_.get_array(node_size).deallocate_ordered(memory, count); + auto& pool = pools_.get_array(node_size); + auto n = detail::free_memory_list::calc_block_count + (pool.node_size(), count, node_size); + pool.deallocate_ordered(memory, n); } /// @} /// @{ /// \brief Reserves memory for the node/array pool for a given node size. - /// \detail Use the \ref node_pool or \ref array_pool parameter to check it. + /// \details Use the \ref node_pool or \ref array_pool parameter to check it. void reserve(node_pool, std::size_t node_size, std::size_t capacity) { auto& pool = pools_.get_node(node_size); @@ -120,49 +125,49 @@ namespace foonathan { namespace memory /// @} /// \brief Returns the maximum node size for which there is a pool. - std::size_t max_node_size() const noexcept + std::size_t max_node_size() const FOONATHAN_NOEXCEPT { return pools_.max_node_size(); } /// @{ /// \brief Returns the capacity available in the node/array pool for a given node size. - /// \detail This is the amount of memory available inside the given pool.
+ /// \details This is the number of nodes available inside the given pool.
/// Use the \ref node_pool or \ref array_pool parameter to check it. - std::size_t pool_capacity(node_pool, std::size_t node_size) const noexcept + std::size_t pool_capacity(node_pool, std::size_t node_size) const FOONATHAN_NOEXCEPT { return pools_.get_node(node_size).capacity(); } - std::size_t pool_capacity(array_pool, std::size_t node_size) const noexcept + std::size_t pool_capacity(array_pool, std::size_t node_size) const FOONATHAN_NOEXCEPT { return pools_.get_array(node_size).capacity(); } /// @} /// \brief Returns the capacity available outside the pools. - /// \detail This is the amount of memory that can be given to the pools after they are exhausted. - std::size_t capacity() const noexcept + /// \details This is the amount of memory that can be given to the pools after they are exhausted. + std::size_t capacity() const FOONATHAN_NOEXCEPT { return stack_.end() - stack_.top(); } /// \brief Returns the size of the next memory block. - /// \detail This is the new capacity after \ref capacity() is exhausted.
+ /// \details This is the new capacity after \ref capacity() is exhausted.
/// This is also the maximum array size. - std::size_t next_capacity() const noexcept + std::size_t next_capacity() const FOONATHAN_NOEXCEPT { return block_list_.next_block_size(); } /// \brief Returns the \ref impl_allocator. - impl_allocator& get_impl_allocator() noexcept + impl_allocator& get_impl_allocator() FOONATHAN_NOEXCEPT { return block_list_.get_allocator(); } private: - std::size_t def_capacity() const noexcept + std::size_t def_capacity() const FOONATHAN_NOEXCEPT { return block_list_.next_block_size() / (pools_.size() * 2); } @@ -170,8 +175,7 @@ namespace foonathan { namespace memory void reserve_impl(detail::free_memory_list &pool, std::size_t capacity, void (detail::free_memory_list::*insert)(void*, std::size_t)) { - // alignment guaranteed - auto mem = stack_.allocate(capacity, alignof(std::max_align_t)); + auto mem = stack_.allocate(capacity, detail::max_alignment); if (!mem) { // insert rest @@ -179,7 +183,7 @@ namespace foonathan { namespace memory (pool.*insert)(stack_.top(), stack_.end() - stack_.top()); stack_ = detail::fixed_memory_stack(block_list_.allocate()); // allocate ensuring alignment - mem = stack_.allocate(capacity, alignof(std::max_align_t)); + mem = stack_.allocate(capacity, detail::max_alignment); assert(mem); } // insert new @@ -191,8 +195,8 @@ namespace foonathan { namespace memory detail::free_list_array pools_; }; - // \brief Specialization of the \ref allocator_traits for a \ref memory_pool_collection. - /// \detail This allows passing a pool directly as allocator to container types. + /// \brief Specialization of the \ref allocator_traits for a \ref memory_pool_collection. + /// \details This allows passing a pool directly as allocator to container types. 
/// \ingroup memory template class allocator_traits> @@ -205,7 +209,7 @@ namespace foonathan { namespace memory std::size_t size, std::size_t alignment) { assert(size <= max_node_size(state) && "invalid node size"); - assert(alignment <= alignof(std::max_align_t) && "invalid alignment"); + assert(alignment <= detail::max_alignment && "invalid alignment"); return state.allocate_node(size); } @@ -213,39 +217,39 @@ namespace foonathan { namespace memory std::size_t size, std::size_t alignment) { assert(size <= max_node_size(state) && "invalid node size"); - assert(alignment <= alignof(std::max_align_t) && "invalid alignment"); + assert(alignment <= detail::max_alignment && "invalid alignment"); assert(count * size <= max_array_size(state) && "invalid array count"); return state.allocate_array(count, size); } static void deallocate_node(allocator_type &state, - void *node, std::size_t size, std::size_t) noexcept + void *node, std::size_t size, std::size_t) FOONATHAN_NOEXCEPT { state.deallocate_node(node, size); } static void deallocate_array(allocator_type &state, - void *array, std::size_t count, std::size_t size, std::size_t) noexcept + void *array, std::size_t count, std::size_t size, std::size_t) FOONATHAN_NOEXCEPT { state.deallocate_array(array, count, size); } /// \brief Maximum size of a node is the maximum pool collections node size. - static std::size_t max_node_size(const allocator_type &state) noexcept + static std::size_t max_node_size(const allocator_type &state) FOONATHAN_NOEXCEPT { return state.max_node_size(); } /// \brief Maximum size of an array is the capacity in the next block of the pool. - static std::size_t max_array_size(const allocator_type &state) noexcept + static std::size_t max_array_size(const allocator_type &state) FOONATHAN_NOEXCEPT { return state.next_capacity(); } /// \brief Maximum alignment is alignof(std::max_align_t). 
- static std::size_t max_alignment(const allocator_type &) noexcept + static std::size_t max_alignment(const allocator_type &) FOONATHAN_NOEXCEPT { - return alignof(std::max_align_t); + return detail::max_alignment; } }; }} // namespace foonathan::portal diff --git a/pool_type.cpp b/pool_type.cpp new file mode 100644 index 00000000..fc9fd455 --- /dev/null +++ b/pool_type.cpp @@ -0,0 +1,49 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. + +#include "pool_type.hpp" + +#include "detail/free_list.hpp" +#include "detail/small_free_list.hpp" + +using namespace foonathan::memory; +using namespace detail; + +void detail::insert(node_pool, free_memory_list &free_list, + void *ptr, std::size_t size) FOONATHAN_NOEXCEPT +{ + free_list.insert(ptr, size); +} + +void detail::insert(array_pool, free_memory_list &free_list, + void *ptr, std::size_t size) FOONATHAN_NOEXCEPT +{ + free_list.insert_ordered(ptr, size); +} + +void detail::insert(small_node_pool, small_free_memory_list &free_list, + void *ptr, std::size_t size) FOONATHAN_NOEXCEPT +{ + free_list.insert(ptr, size); +} + +void detail::deallocate(node_pool, free_memory_list &free_list, void *node) FOONATHAN_NOEXCEPT +{ + free_list.deallocate(node); +} + +void detail::deallocate(array_pool, free_memory_list &free_list, void *node) FOONATHAN_NOEXCEPT +{ + free_list.deallocate_ordered(node); +} + +void detail::deallocate(array_pool, free_memory_list &free_list, void *node, std::size_t n) FOONATHAN_NOEXCEPT +{ + free_list.deallocate_ordered(node, n); +} + +void detail::deallocate(small_node_pool, small_free_memory_list &free_list, void *node) FOONATHAN_NOEXCEPT +{ + free_list.deallocate(node); +} diff --git a/pool_type.hpp b/pool_type.hpp index ea4b0f61..b83800de 100644 --- a/pool_type.hpp +++ b/pool_type.hpp @@ -5,15 +5,44 @@ #ifndef FOONATHAN_MEMORY_POOL_TYPE_HPP_INCLUDED #define 
FOONATHAN_MEMORY_POOL_TYPE_HPP_INCLUDED +#include + +#include "config.hpp" + namespace foonathan { namespace memory { /// @{ /// \brief Tag types defining whether or not a pool supports arrays. - /// \detail An \c array_pool supports both node and arrays. + /// \details An \c array_pool supports both node and arrays. /// \ingroup memory struct node_pool : std::false_type {}; struct array_pool : std::true_type {}; /// @} + + /// \brief Tag type indicating a pool for small objects. + /// \details A small node pool does not support arrays. + /// \ingroup memory + struct small_node_pool : std::false_type {}; + + namespace detail + { + class free_memory_list; + class small_free_memory_list; + + // either calls insert or insert_ordered + void insert(node_pool, free_memory_list &free_list, + void *ptr, std::size_t size) FOONATHAN_NOEXCEPT; + void insert(array_pool, free_memory_list &free_list, + void *ptr, std::size_t size) FOONATHAN_NOEXCEPT; + void insert(small_node_pool, small_free_memory_list &free_list, + void *ptr, std::size_t size) FOONATHAN_NOEXCEPT; + + // either calls deallocate or deallocate ordered + void deallocate(node_pool, free_memory_list &free_list, void *node) FOONATHAN_NOEXCEPT; + void deallocate(array_pool, free_memory_list &free_list, void *node) FOONATHAN_NOEXCEPT; + void deallocate(array_pool, free_memory_list &free_list, void *node, std::size_t n) FOONATHAN_NOEXCEPT; + void deallocate(small_node_pool, small_free_memory_list &free_list, void *node) FOONATHAN_NOEXCEPT; + } // namespace detail }} // namespace foonathan::memory #endif // FOONATHAN_MEMORY_POOL_TYPE_HPP_INCLUDED diff --git a/raw_allocator_base.hpp b/raw_allocator_base.hpp index bfbaf0bc..eeaead4b 100644 --- a/raw_allocator_base.hpp +++ b/raw_allocator_base.hpp @@ -12,6 +12,8 @@ #include #include +#include "detail/align.hpp" + namespace foonathan { namespace memory { /// \brief Base class that generates default implementations for the more repetitive functions. 
@@ -19,13 +21,9 @@ namespace foonathan { namespace memory class raw_allocator_base { public: - raw_allocator_base() = default; - - raw_allocator_base(const raw_allocator_base&) = delete; - raw_allocator_base(raw_allocator_base&&) = default; - - raw_allocator_base& operator=(const raw_allocator_base &) = delete; - raw_allocator_base& operator=(raw_allocator_base &&) = default; + raw_allocator_base() FOONATHAN_NOEXCEPT {} + raw_allocator_base(raw_allocator_base&&) FOONATHAN_NOEXCEPT {} + raw_allocator_base& operator=(raw_allocator_base &&) FOONATHAN_NOEXCEPT {return *this;} /// @{ /// \brief Array allocation forwards to node allocation. @@ -36,7 +34,7 @@ namespace foonathan { namespace memory } void deallocate_array(void *ptr, std::size_t count, - std::size_t size, std::size_t alignment) noexcept + std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT { static_cast(this)->deallocate_node(ptr, count * size, alignment); } @@ -44,25 +42,25 @@ namespace foonathan { namespace memory /// @{ /// \brief Returns maximum value. - std::size_t max_node_size() const noexcept + std::size_t max_node_size() const FOONATHAN_NOEXCEPT { return std::numeric_limits::max(); } - std::size_t max_array_size() const noexcept + std::size_t max_array_size() const FOONATHAN_NOEXCEPT { return std::numeric_limits::max(); } /// @} /// \brief Returns \c alignof(std::max_align_t). - std::size_t max_alignment() const noexcept + std::size_t max_alignment() const FOONATHAN_NOEXCEPT { - return alignof(std::max_align_t); + return detail::max_alignment; } protected: - ~raw_allocator_base() noexcept = default; + ~raw_allocator_base() FOONATHAN_NOEXCEPT = default; }; }} // namespace foonathan::memory diff --git a/smart_ptr.hpp b/smart_ptr.hpp index 8f1395cc..bc67331a 100644 --- a/smart_ptr.hpp +++ b/smart_ptr.hpp @@ -15,65 +15,65 @@ namespace foonathan { namespace memory { /// \brief A deleter class that calls the appropriate deallocate function. - /// \detail It stores an \ref raw_allocator_adapter. 
It does not call any destrucotrs. + /// \details It stores an \ref allocator_reference. It does not call any destrucotrs. /// \ingroup memory template - class raw_allocator_deallocator : raw_allocator_adapter + class raw_allocator_deallocator : allocator_reference { public: using raw_allocator = RawAllocator; using value_type = Type; /// \brief Creates it giving it the allocator used for deallocation. - raw_allocator_deallocator(raw_allocator_adapter alloc) noexcept - : raw_allocator_adapter(std::move(alloc)) {} + raw_allocator_deallocator(allocator_reference alloc) FOONATHAN_NOEXCEPT + : allocator_reference(std::move(alloc)) {} /// \brief Deallocates the memory via the stored allocator. - /// \detail It calls \ref allocator_traits::deallocate_node, but no destructors. - void operator()(value_type *pointer) noexcept + /// \details It calls \ref allocator_traits::deallocate_node, but no destructors. + void operator()(value_type *pointer) FOONATHAN_NOEXCEPT { - this->deallocate_node(pointer, sizeof(value_type), alignof(value_type)); + this->deallocate_node(pointer, sizeof(value_type), FOONATHAN_ALIGNOF(value_type)); } /// \brief Returns a reference to the stored allocator. - auto get_allocator() const noexcept - -> decltype(this->raw_allocator_adapter::get_allocator()) + auto get_allocator() const FOONATHAN_NOEXCEPT + -> decltype(std::declval>().get_allocator()) { - return this->raw_allocator_adapter::get_allocator(); + return this->allocator_reference::get_allocator(); } }; /// \brief Specialization of \ref raw_allocator_deallocator for arrays. /// \ingroup memory template - class raw_allocator_deallocator : raw_allocator_adapter + class raw_allocator_deallocator : allocator_reference { public: using raw_allocator = RawAllocator; using value_type = Type; /// \brief Creates it giving it the allocator used for deallocation and the array size. 
- raw_allocator_deallocator(raw_allocator_adapter alloc, - std::size_t size) noexcept - : raw_allocator_adapter(std::move(alloc)), + raw_allocator_deallocator(allocator_reference alloc, + std::size_t size) FOONATHAN_NOEXCEPT + : allocator_reference(std::move(alloc)), size_(size) {} /// \brief Deallocates the memory via the stored allocator. - /// \detail It calls \ref allocator_traits::deallocate_array, but no destructors. - void operator()(value_type *pointer) noexcept + /// \details It calls \ref allocator_traits::deallocate_array, but no destructors. + void operator()(value_type *pointer) FOONATHAN_NOEXCEPT { - this->deallocate_array(pointer, size_, sizeof(value_type), alignof(value_type)); + this->deallocate_array(pointer, size_, sizeof(value_type), FOONATHAN_ALIGNOF(value_type)); } /// \brief Returns a reference to the stored allocator. - auto get_allocator() const noexcept - -> decltype(this->raw_allocator_adapter::get_allocator()) + auto get_allocator() const FOONATHAN_NOEXCEPT + -> decltype(std::declval>().get_allocator()) { - return this->raw_allocator_adapter::get_allocator(); + return this->allocator_reference::get_allocator(); } /// \brief Returns the array size. - std::size_t array_size() const noexcept + std::size_t array_size() const FOONATHAN_NOEXCEPT { return size_; } @@ -83,68 +83,68 @@ namespace foonathan { namespace memory }; /// \brief A deleter class that calls the appropriate destructors and deallocate function. - /// \detail It stores an \ref raw_allocator_adapter. It calls destructors. + /// \details It stores an \ref allocator_reference. It calls destructors. /// \ingroup memory template - class raw_allocator_deleter : raw_allocator_adapter + class raw_allocator_deleter : allocator_reference { public: using raw_allocator = RawAllocator; using value_type = Type; /// \brief Creates it giving it the allocator used for deallocation. 
- raw_allocator_deleter(raw_allocator_adapter alloc) noexcept - : raw_allocator_adapter(std::move(alloc)) {} + raw_allocator_deleter(allocator_reference alloc) FOONATHAN_NOEXCEPT + : allocator_reference(std::move(alloc)) {} /// \brief Deallocates the memory via the stored allocator. - /// \detail It calls the destructor and \ref allocator_traits::deallocate_node. - void operator()(value_type *pointer) noexcept + /// \details It calls the destructor and \ref allocator_traits::deallocate_node. + void operator()(value_type *pointer) FOONATHAN_NOEXCEPT { pointer->~value_type(); - this->deallocate_node(pointer, sizeof(value_type), alignof(value_type)); + this->deallocate_node(pointer, sizeof(value_type), FOONATHAN_ALIGNOF(value_type)); } /// \brief Returns a reference to the stored allocator. - auto get_allocator() const noexcept - -> decltype(this->raw_allocator_adapter::get_allocator()) + auto get_allocator() const FOONATHAN_NOEXCEPT + -> decltype(std::declval>().get_allocator()) { - return this->raw_allocator_adapter::get_allocator(); + return this->allocator_reference::get_allocator(); } }; /// \brief Specialization of \ref raw_allocator_deleter for arrays. /// \ingroup memory template - class raw_allocator_deleter : raw_allocator_adapter + class raw_allocator_deleter : allocator_reference { public: using raw_allocator = RawAllocator; using value_type = Type; /// \brief Creates it giving it the allocator used for deallocation and the array size. - raw_allocator_deleter(raw_allocator_adapter alloc, - std::size_t size) noexcept - : raw_allocator_adapter(std::move(alloc)), + raw_allocator_deleter(allocator_reference alloc, + std::size_t size) FOONATHAN_NOEXCEPT + : allocator_reference(std::move(alloc)), size_(size) {} /// \brief Deallocates the memory via the stored allocator. - /// \detail It calls the destructors and \ref allocator_traits::deallocate_array. 
- void operator()(value_type *pointer) noexcept + /// \details It calls the destructors and \ref allocator_traits::deallocate_array. + void operator()(value_type *pointer) FOONATHAN_NOEXCEPT { for (auto cur = pointer; cur != pointer + size_; ++cur) cur->~value_type(); - this->deallocate_array(pointer, size_, sizeof(value_type), alignof(value_type)); + this->deallocate_array(pointer, size_, sizeof(value_type), FOONATHAN_ALIGNOF(value_type)); } /// \brief Returns a reference to the stored allocator. - auto get_allocator() const noexcept - -> decltype(this->raw_allocator_adapter::get_allocator()) + auto get_allocator() const FOONATHAN_NOEXCEPT + -> decltype(std::declval>().get_allocator()) { - return this->raw_allocator_adapter::get_allocator(); + return this->allocator_reference::get_allocator(); } /// \brief Returns the array size. - std::size_t array_size() const noexcept + std::size_t array_size() const FOONATHAN_NOEXCEPT { return size_; } @@ -156,12 +156,12 @@ namespace foonathan { namespace memory namespace detail { template - auto allocate_unique(raw_allocator_adapter alloc, Args&&... args) + auto allocate_unique(allocator_reference alloc, Args&&... 
args) -> std::unique_ptr> { using raw_ptr = std::unique_ptr>; - auto memory = alloc.allocate_node(sizeof(T), alignof(T)); + auto memory = alloc.allocate_node(sizeof(T), FOONATHAN_ALIGNOF(T)); // raw_ptr deallocates memory in case of constructor exception raw_ptr result(static_cast(memory), {alloc}); // call constructor @@ -195,15 +195,15 @@ namespace foonathan { namespace memory } template - auto allocate_array_unique(std::size_t size, raw_allocator_adapter alloc) + auto allocate_array_unique(std::size_t size, allocator_reference alloc) -> std::unique_ptr> { using raw_ptr = std::unique_ptr>; - auto memory = alloc.allocate_array(size, sizeof(T), alignof(T)); + auto memory = alloc.allocate_array(size, sizeof(T), FOONATHAN_ALIGNOF(T)); // raw_ptr deallocates memory in case of constructor exception raw_ptr result(static_cast(memory), {alloc, size}); - construct(std::integral_constant{}, + construct(std::integral_constant{}, result.get(), result.get() + size); // pass ownership to return value using a deleter that calls destructor return {result.release(), {alloc, size}}; @@ -220,7 +220,7 @@ namespace foonathan { namespace memory std::unique_ptr::type>> >::type { - return detail::allocate_unique(make_adapter(std::forward(alloc)), + return detail::allocate_unique(make_allocator_reference(std::forward(alloc)), std::forward(args)...); } @@ -235,7 +235,7 @@ namespace foonathan { namespace memory >::type { return detail::allocate_array_unique::type> - (size, make_adapter(std::forward(alloc))); + (size, make_allocator_reference(std::forward(alloc))); } /// \brief Creates an object wrapped in a \c std::shared_ptr using a \ref concept::RawAllocator. 
diff --git a/stack_allocator.hpp b/stack_allocator.hpp index e7c3a6a2..81569150 100644 --- a/stack_allocator.hpp +++ b/stack_allocator.hpp @@ -15,11 +15,29 @@ #include "detail/block_list.hpp" #include "detail/memory_stack.hpp" #include "allocator_traits.hpp" -#include "heap_allocator.hpp" +#include "default_allocator.hpp" #include "raw_allocator_base.hpp" namespace foonathan { namespace memory { + template + class memory_stack; + + namespace detail + { + class stack_marker + { + std::size_t index; + detail::fixed_memory_stack stack; + + stack_marker(std::size_t i, detail::fixed_memory_stack s) FOONATHAN_NOEXCEPT + : index(i), stack(s) {} + + template + friend class memory::memory_stack; + }; + } // namespace detail + /// \brief A memory stack. /// /// Allows fast memory allocations but deallocation is only possible via markers. @@ -28,7 +46,7 @@ namespace foonathan { namespace memory /// It allocates big blocks from an implementation allocator. /// If their size is sufficient, allocations are fast. /// \ingroup memory - template + template class memory_stack { public: @@ -36,7 +54,7 @@ namespace foonathan { namespace memory using impl_allocator = RawAllocator; /// \brief Constructs it with a given start block size. - /// \detail The first memory block is allocated, the block size can change. + /// \details The first memory block is allocated, the block size can change. explicit memory_stack(std::size_t block_size, impl_allocator allocator = impl_allocator()) : list_(block_size, std::move(allocator)) @@ -45,7 +63,7 @@ namespace foonathan { namespace memory } /// \brief Allocates a memory block of given size and alignment. - /// \detail If it does not fit into the current block, a new one will be allocated. + /// \details If it does not fit into the current block, a new one will be allocated. /// The new block must be big enough for the requested memory. 
void* allocate(std::size_t size, std::size_t alignment) { @@ -60,27 +78,18 @@ namespace foonathan { namespace memory } /// \brief Marker type for unwinding. - class marker - { - std::size_t index; - detail::fixed_memory_stack stack; - - marker(std::size_t i, detail::fixed_memory_stack stack) noexcept - : index(i), stack(stack) {} - - friend memory_stack; - }; + using marker = detail::stack_marker; /// \brief Returns a marker to the current top of the stack. - marker top() const noexcept + marker top() const FOONATHAN_NOEXCEPT { return {list_.size() - 1, stack_}; } /// \brief Unwinds the stack to a certain marker. - /// \detail It must be less than the previous one. + /// \details It must be less than the previous one. /// Any access blocks are freed. - void unwind(marker m) noexcept + void unwind(marker m) FOONATHAN_NOEXCEPT { auto diff = list_.size() - m.index - 1; for (auto i = 0u; i != diff; ++i) @@ -89,27 +98,27 @@ namespace foonathan { namespace memory } /// \brief Returns the capacity remaining in the current block. - std::size_t capacity() const noexcept + std::size_t capacity() const FOONATHAN_NOEXCEPT { return stack_.end() - stack_.top(); } /// \brief Returns the size of the next memory block. - /// \detail This is the new capacity after \ref capacity() is exhausted.
+ /// \details This is the new capacity after \ref capacity() is exhausted.
/// This is also the maximum array size. - std::size_t next_capacity() const noexcept + std::size_t next_capacity() const FOONATHAN_NOEXCEPT { return list_.next_block_size(); } /// \brief Frees all unused memory blocks. - void shrink_to_fit() noexcept + void shrink_to_fit() FOONATHAN_NOEXCEPT { list_.shrink_to_fit(); } /// \brief Returns the \ref impl_allocator. - impl_allocator& get_impl_allocator() noexcept + impl_allocator& get_impl_allocator() FOONATHAN_NOEXCEPT { return list_.get_allocator(); } @@ -125,8 +134,8 @@ namespace foonathan { namespace memory detail::fixed_memory_stack stack_; }; - /// \brief Specialization of the \ref allocator_traits for a \ref memory_state. - /// \detail This allows passing a state directly as allocator to container types. + /// \brief Specialization of the \ref allocator_traits for a \ref memory_stack. + /// \details This allows passing a state directly as allocator to container types. /// \ingroup memory template class allocator_traits> @@ -153,29 +162,29 @@ namespace foonathan { namespace memory /// @{ /// \brief Deallocation functions do nothing, use unwinding on the stack to free memory. static void deallocate_node(const allocator_type &, - void *, std::size_t, std::size_t) noexcept {} + void *, std::size_t, std::size_t) FOONATHAN_NOEXCEPT {} static void deallocate_array(const allocator_type &, - void *, std::size_t, std::size_t, std::size_t) noexcept {} + void *, std::size_t, std::size_t, std::size_t) FOONATHAN_NOEXCEPT {} /// @} /// @{ /// \brief The maximum size is the equivalent of the \ref next_capacity(). 
- static std::size_t max_node_size(const allocator_type &state) noexcept + static std::size_t max_node_size(const allocator_type &state) FOONATHAN_NOEXCEPT { return state.next_capacity(); } - static std::size_t max_array_size(const allocator_type &state) noexcept + static std::size_t max_array_size(const allocator_type &state) FOONATHAN_NOEXCEPT { return state.next_capacity(); } /// @} /// \brief There is no maximum alignment (except indirectly through \ref next_capacity()). - static std::size_t max_alignment(const allocator_type &) noexcept + static std::size_t max_alignment(const allocator_type &) FOONATHAN_NOEXCEPT { - return 0; + return std::size_t(-1); } }; }} // namespace foonathan::memory diff --git a/std_allocator_base.hpp b/std_allocator_base.hpp index 5882bc52..8ec0a0fb 100644 --- a/std_allocator_base.hpp +++ b/std_allocator_base.hpp @@ -39,12 +39,12 @@ namespace foonathan { namespace memory } template - void destroy(U *p) noexcept + void destroy(U *p) FOONATHAN_NOEXCEPT { p->~U(); } - size_type max_size() const noexcept + size_type max_size() const FOONATHAN_NOEXCEPT { return std::numeric_limits::max(); } @@ -55,7 +55,7 @@ namespace foonathan { namespace memory } protected: - ~std_allocator_base() noexcept = default; + ~std_allocator_base() FOONATHAN_NOEXCEPT = default; }; }} // namespace foonathan::memory diff --git a/temporary_allocator.cpp b/temporary_allocator.cpp new file mode 100644 index 00000000..c24022ca --- /dev/null +++ b/temporary_allocator.cpp @@ -0,0 +1,136 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. 
+ +#include "temporary_allocator.hpp" + +#include + +#include "default_allocator.hpp" +#include "raw_allocator_base.hpp" + +using namespace foonathan::memory; + +namespace +{ + class stack_impl_allocator : public raw_allocator_base + { + public: + stack_impl_allocator() FOONATHAN_NOEXCEPT {} + + void* allocate_node(std::size_t size, std::size_t alignment) + { + if (!first_call_) + tracker_(size); + else + first_call_ = false; + return default_allocator().allocate_node(size, alignment); + } + + void deallocate_node(void *memory, std::size_t size, std::size_t alignment) + { + default_allocator().deallocate_node(memory, size, alignment); + } + + static temporary_allocator::growth_tracker set_tracker(temporary_allocator::growth_tracker t) + { + auto old = tracker_; + tracker_ = t; + return old; + } + + private: + static void default_tracker(std::size_t) FOONATHAN_NOEXCEPT {} + + static FOONATHAN_THREAD_LOCAL temporary_allocator::growth_tracker tracker_; + static FOONATHAN_THREAD_LOCAL bool first_call_; + }; + + FOONATHAN_THREAD_LOCAL temporary_allocator::growth_tracker + stack_impl_allocator::tracker_ = stack_impl_allocator::default_tracker; + FOONATHAN_THREAD_LOCAL bool stack_impl_allocator::first_call_ = true; + + using stack_type = memory_stack; + using storage_t = std::aligned_storage::type; + FOONATHAN_THREAD_LOCAL storage_t temporary_stack; + // whether or not the temporary_stack has been created + FOONATHAN_THREAD_LOCAL bool is_created = false; + + stack_type& get() FOONATHAN_NOEXCEPT + { + assert(is_created); + return *static_cast(static_cast(&temporary_stack)); + } + + stack_type& create(std::size_t size) + { + if (!is_created) + { + ::new(static_cast(&temporary_stack)) stack_type(size); + is_created = true; + } + return get(); + } +} + +detail::temporary_allocator_dtor_t::temporary_allocator_dtor_t() FOONATHAN_NOEXCEPT +{ + ++nifty_counter_; +} + +detail::temporary_allocator_dtor_t::~temporary_allocator_dtor_t() FOONATHAN_NOEXCEPT +{ + if (--nifty_counter_ == 
0u && is_created) + { + get().~stack_type(); + // at this point the current thread is over, so boolean not necessary + } +} + +FOONATHAN_THREAD_LOCAL std::size_t detail::temporary_allocator_dtor_t::nifty_counter_ = 0u; + +temporary_allocator::growth_tracker temporary_allocator::set_growth_tracker(growth_tracker t) FOONATHAN_NOEXCEPT +{ + return stack_impl_allocator::set_tracker(t); +} + +temporary_allocator::temporary_allocator(temporary_allocator &&other) FOONATHAN_NOEXCEPT +: marker_(other.marker_), prev_(top_), unwind_(true) +{ + other.unwind_ = false; + top_ = this; +} + +temporary_allocator::~temporary_allocator() FOONATHAN_NOEXCEPT +{ + if (unwind_) + get().unwind(marker_); + top_ = prev_; +} + +temporary_allocator& temporary_allocator::operator=(temporary_allocator &&other) FOONATHAN_NOEXCEPT +{ + marker_ = other.marker_; + unwind_ = true; + other.unwind_ = false; + return *this; +} + +void* temporary_allocator::allocate(std::size_t size, std::size_t alignment) +{ + assert(top_ == this && "must allocate from top temporary allocator"); + return get().allocate(size, alignment); +} + +temporary_allocator::temporary_allocator(std::size_t size) FOONATHAN_NOEXCEPT +: marker_(create(size).top()), prev_(nullptr), unwind_(true) +{ + top_ = this; +} + +FOONATHAN_THREAD_LOCAL const temporary_allocator* temporary_allocator::top_ = nullptr; + +std::size_t allocator_traits::max_node_size(const allocator_type &) FOONATHAN_NOEXCEPT +{ + return get().next_capacity(); +} diff --git a/temporary_allocator.hpp b/temporary_allocator.hpp new file mode 100644 index 00000000..ce1371d0 --- /dev/null +++ b/temporary_allocator.hpp @@ -0,0 +1,128 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. 
+ +#ifndef FOONATHAN_MEMORY_TEMPORARY_ALLOCATOR_HPP_INCLUDED +#define FOONATHAN_MEMORY_TEMPORARY_ALLOCATOR_HPP_INCLUDED + +#include "allocator_traits.hpp" +#include "config.hpp" +#include "stack_allocator.hpp" + +namespace foonathan { namespace memory +{ + namespace detail + { + static class temporary_allocator_dtor_t + { + public: + temporary_allocator_dtor_t() FOONATHAN_NOEXCEPT; + ~temporary_allocator_dtor_t() FOONATHAN_NOEXCEPT; + private: + static FOONATHAN_THREAD_LOCAL std::size_t nifty_counter_; + } temporary_allocator_dtor; + } // namespace detail + + /// \brief A memory allocator for temporary allocations. + /// \details It is similar to \c alloca() but portable. + /// It uses a \c thread_local \ref memory_stack<> for the allocation.
+ /// It is not a \ref concept::RawAllocator, but the \ref allocator_traits are specialized for it.
+ /// \ingroup memory + class temporary_allocator + { + public: + /// \brief The type of the growth tracker. + /// \details It gets called when the internal \ref memory_stack<> needs to grow. + /// It gets the size of the new block that will be allocated. + /// If this function doesn't return, growth is prevented but the allocator becomes unusable.
+ /// Each thread has its own internal stack and thus own tracker. + using growth_tracker = void(*)(std::size_t size); + + /// \brief Exchanges the \ref growth_tracker. + static growth_tracker set_growth_tracker(growth_tracker t) FOONATHAN_NOEXCEPT; + + temporary_allocator(temporary_allocator &&other) FOONATHAN_NOEXCEPT; + ~temporary_allocator() FOONATHAN_NOEXCEPT; + + temporary_allocator& operator=(temporary_allocator &&other) FOONATHAN_NOEXCEPT; + + /// \brief Allocates temporary memory of given size and alignment. + /// \details It will be deallocated when the allocator goes out of scope.
+ /// For that reason, allocation must be made from the most recently created allocator. + void* allocate(std::size_t size, std::size_t alignment); + + private: + temporary_allocator(std::size_t size) FOONATHAN_NOEXCEPT; + + static FOONATHAN_THREAD_LOCAL const temporary_allocator *top_; + memory_stack<>::marker marker_; + const temporary_allocator *prev_; + bool unwind_; + + friend temporary_allocator make_temporary_allocator(std::size_t size) FOONATHAN_NOEXCEPT; + }; + + /// \brief Creates a new \ref temporary_allocator. + /// \details This is the only way to create one; it avoids accidental creation not on the stack.
+ /// The internal stack allocator will only be created in a thread if there is at least one call to this function. + /// If it is the call that actually creates it, the stack has the initial size passed to it. + /// The stack will be destroyed when the current thread ends, so there is - no growth needed - only one heap allocation per thread. + /// \relates temporary_allocator + inline temporary_allocator make_temporary_allocator(std::size_t size = 4096u) FOONATHAN_NOEXCEPT + { + return {size}; + } + + /// \brief Specialization of the \ref allocator_traits for \ref temporary_allocator. + /// \details This allows passing a pool directly as allocator to container types. + /// \ingroup memory + template <> + class allocator_traits + { + public: + using allocator_type = temporary_allocator; + using is_stateful = std::true_type; + + /// @{ + /// \brief Allocation function forward to the temporary allocator for array and node. + static void* allocate_node(allocator_type &state, std::size_t size, std::size_t alignment) + { + assert(size <= max_node_size(state) && "invalid node size"); + return state.allocate(size, alignment); + } + + static void* allocate_array(allocator_type &state, std::size_t count, + std::size_t size, std::size_t alignment) + { + return allocate_node(state, count * size, alignment); + } + /// @} + + /// @{ + /// \brief Deallocation functions do nothing, everything is freed on scope exit. + static void deallocate_node(const allocator_type &, + void *, std::size_t, std::size_t) FOONATHAN_NOEXCEPT {} + + static void deallocate_array(const allocator_type &, + void *, std::size_t, std::size_t, std::size_t) FOONATHAN_NOEXCEPT {} + /// @} + + /// @{ + /// \brief The maximum size is the equivalent of the capacity left in the next block of the internal \ref memory_stack<>. 
+ static std::size_t max_node_size(const allocator_type &state) FOONATHAN_NOEXCEPT; + + static std::size_t max_array_size(const allocator_type &state) FOONATHAN_NOEXCEPT + { + return max_node_size(state); + } + /// @} + + /// \brief There is no maximum alignment (except indirectly through \ref max_node_size()). + static std::size_t max_alignment(const allocator_type &) FOONATHAN_NOEXCEPT + { + return std::size_t(-1); + } + }; +}} // namespace foonathan::memory + +#endif // FOONATHAN_MEMORY_TEMPORARY_ALLOCATOR_HPP_INCLUDED diff --git a/threading.hpp b/threading.hpp new file mode 100644 index 00000000..511d8791 --- /dev/null +++ b/threading.hpp @@ -0,0 +1,228 @@ +// Copyright (C) 2015 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. + +#ifndef FOONATHAN_MEMORY_THREADING_HPP_INCLUDED +#define FOONATHAN_MEMORY_THREADING_HPP_INCLUDED + +/// \file +/// \brief Adapters to share allocators between threads. + +#include + +#include "allocator_traits.hpp" +#include "config.hpp" + +namespace foonathan { namespace memory +{ + /// \brief A dummy mutex class that does not lock anything. + /// \details It serves the \c Mutex concept. Use it to disable locking for adapters. + /// \ingroup memory + struct dummy_mutex + { + void lock() FOONATHAN_NOEXCEPT {} + bool try_lock() FOONATHAN_NOEXCEPT {return true;} + void unlock() FOONATHAN_NOEXCEPT {} + }; + + /// \brief The default mutex used by \ref allocator_reference. + /// \details It is \c std::mutex if \ref FOONATHAN_MEMORY_THREAD_SAFE_ADAPTER is \c true, \ref dummy_mutex otherwise. 
+ /// \ingroup memory +#if FOONATHAN_MEMORY_THREAD_SAFE_ADAPTER + using default_mutex = std::mutex; +#else + using default_mutex = dummy_mutex; +#endif + + namespace detail + { + // selects a mutex for an Allocator + // stateless allocators don't need locking + template + using mutex_for = typename std::conditional::is_stateful::value, + Mutex, dummy_mutex>::type; + + // storage for mutexes to use EBO + // it provides const lock/unlock function, inherit from it + template + class mutex_storage + { + public: + mutex_storage() FOONATHAN_NOEXCEPT = default; + mutex_storage(const mutex_storage &) FOONATHAN_NOEXCEPT {} + + mutex_storage& operator=(const mutex_storage &) FOONATHAN_NOEXCEPT + { + return *this; + } + + void lock() const + { + mutex_.lock(); + } + + void unlock() const FOONATHAN_NOEXCEPT + { + mutex_.unlock(); + } + + protected: + ~mutex_storage() FOONATHAN_NOEXCEPT = default; + private: + mutable Mutex mutex_; + }; + + template <> + class mutex_storage + { + public: + mutex_storage() FOONATHAN_NOEXCEPT = default; + + void lock() const FOONATHAN_NOEXCEPT {} + void unlock() const FOONATHAN_NOEXCEPT {} + protected: + ~mutex_storage() FOONATHAN_NOEXCEPT = default; + }; + + // non changeable pointer to an Allocator that keeps a lock + // I don't think EBO is necessary here... + template + class locked_allocator + { + public: + locked_allocator(Alloc &alloc, Mutex &m) FOONATHAN_NOEXCEPT + : lock_(m), alloc_(&alloc) {} + + locked_allocator(locked_allocator &&other) FOONATHAN_NOEXCEPT + : lock_(std::move(other.lock_)), alloc_(other.alloc_) {} + + Alloc& operator*() const FOONATHAN_NOEXCEPT + { + return *alloc_; + } + + Alloc* operator->() const FOONATHAN_NOEXCEPT + { + return alloc_; + } + + private: + std::unique_lock lock_; + Alloc *alloc_; + }; + } // namespace detail + + /// \brief An allocator adapter that uses a mutex for synchronizing. + /// \details It locks the mutex for each function called. + /// It will not look anything if the allocator is stateless. 
+ /// \ingroup memory + template + class thread_safe_allocator : RawAllocator, + detail::mutex_storage> + { + using traits = allocator_traits; + using actual_mutex = const detail::mutex_storage>; + public: + using raw_allocator = RawAllocator; + using mutex = Mutex; + + using is_stateful = std::true_type; + + thread_safe_allocator(raw_allocator &&alloc = {}) + : raw_allocator(std::move(alloc)) {} + + void* allocate_node(std::size_t size, std::size_t alignment) + { + std::lock_guard lock(*this); + return traits::allocate_node(get_allocator(), size, alignment); + } + + void* allocate_array(std::size_t count, std::size_t size, std::size_t alignment) + { + std::lock_guard lock(*this); + return traits::allocate_array(get_allocator(), count, size, alignment); + } + + void deallocate_node(void *ptr, + std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT + { + std::lock_guard lock(*this); + traits::deallocate_node(get_allocator(), ptr, size, alignment); + } + + void deallocate_array(void *ptr, std::size_t count, + std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT + { + std::lock_guard lock(*this); + traits::deallocate_array(get_allocator(), ptr, count, size, alignment); + } + + std::size_t max_node_size() const + { + std::lock_guard lock(*this); + return traits::max_node_size(get_allocator()); + } + + std::size_t max_array_size() const + { + std::lock_guard lock(*this); + return traits::max_array_size(get_allocator()); + } + + std::size_t max_alignment() const + { + std::lock_guard lock(*this); + return traits::max_alignment(get_allocator()); + } + + /// @{ + /// \brief Returns a reference to the allocator. + /// \details It is not synchronized, so race conditions might occur. + raw_allocator& get_allocator() FOONATHAN_NOEXCEPT + { + return *this; + } + + const raw_allocator& get_allocator() const FOONATHAN_NOEXCEPT + { + return *this; + } + /// @} + + /// @{ + /// \brief Returns a pointer to the allocator while keeping it locked. 
+ /// \details It returns a proxy object that holds the lock. + /// It has overloaded operator* and -> to give access to the allocator + /// but it can't be reassigned to a different allocator object. + detail::locked_allocator lock() FOONATHAN_NOEXCEPT + { + return {*this, *this}; + } + + detail::locked_allocator lock() const FOONATHAN_NOEXCEPT + { + return {*this, *this}; + } + /// @} + }; + + /// @{ + /// \brief Creates a \ref thread_safe_allocator. + /// \relates thread_safe_allocator + template + auto make_thread_safe_allocator(RawAllocator &&allocator) + -> thread_safe_allocator::type> + { + return std::forward(allocator); + } + + template + auto make_thread_safe_allocator(RawAllocator &&allocator) + -> thread_safe_allocator::type, Mutex> + { + return std::forward(allocator); + } + /// @} +}} // namespace foonathan::memory + +#endif // FOONATHAN_MEMORY_THREADING_HPP_INCLUDED diff --git a/tracking.hpp b/tracking.hpp index a0347a4b..4604b104 100644 --- a/tracking.hpp +++ b/tracking.hpp @@ -45,30 +45,30 @@ namespace foonathan { namespace memory } void deallocate_node(void *ptr, - std::size_t size, std::size_t alignment) noexcept + std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT { traits::deallocate_node(*this, ptr, size, alignment); t_->on_allocator_shrinking(ptr, size); } void deallocate_array(void *ptr, std::size_t count, - std::size_t size, std::size_t alignment) noexcept + std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT { traits::deallocate_array(*this, ptr, count, size, alignment); t_->on_allocator_shrinking(ptr, size * count); } - std::size_t max_node_size() const noexcept + std::size_t max_node_size() const { return traits::max_node_size(*this); } - std::size_t max_array_size() const noexcept + std::size_t max_array_size() const { return traits::max_array_size(*this); } - std::size_t max_alignment() const noexcept + std::size_t max_alignment() const { return traits::max_alignment(*this); } @@ -79,13 +79,13 @@ namespace foonathan { 
namespace memory } // namespace detail /// \brief A wrapper around an \ref concept::RawAllocator that allows logging. - /// \detail The \c Tracker must provide the following, \c noexcept functions: + /// \details The \c Tracker must provide the following, \c FOONATHAN_NOEXCEPT functions: /// * \c on_node_allocation(void *memory, std::size_t size, std::size_t alignment) /// * \c on_node_deallocation(void *memory, std::size_t size, std::size_t alignment) /// * \c on_array_allocation(void *memory, std::size_t count, std::size_t size, std::size_t alignment) /// * \c on_array_deallocation(void *memory, std::size_t count, std::size_t size, std::size_t alignment) ///
If you use a deeply tracked allocator via the appropriate \ref make_tracked_allocator() overload, - /// the \c Tracker must also provide the following two, \c noexcept functions: + /// the \c Tracker must also provide the following two, \c FOONATHAN_NOEXCEPT functions: /// * \c on_allocator_growth(void *memory, std::size_t total_size) /// * \c on_allocator_shrinking(void *memory, std::size_t total_size) ///
They are called on the allocation/deallocation functions of the implementation allocator. @@ -103,7 +103,7 @@ namespace foonathan { namespace memory using is_stateful = std::integral_constant::value>; - explicit tracked_allocator(tracker t = {}, raw_allocator allocator = {}) + explicit tracked_allocator(tracker t = {}, raw_allocator&& allocator = {}) : tracker(std::move(t)), raw_allocator(std::move(allocator)) {} /// @{ @@ -123,14 +123,14 @@ namespace foonathan { namespace memory } void deallocate_node(void *ptr, - std::size_t size, std::size_t alignment) noexcept + std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT { this->on_node_deallocation(ptr, size, alignment); traits::deallocate_node(get_allocator(), ptr, size, alignment); } void deallocate_array(void *ptr, std::size_t count, - std::size_t size, std::size_t alignment) noexcept + std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT { this->on_array_deallocation(ptr, count, size, alignment); traits::deallocate_array(get_allocator(), ptr, count, size, alignment); @@ -139,17 +139,17 @@ namespace foonathan { namespace memory /// @{ /// \brief Forwards to the allocator. - std::size_t max_node_size() const noexcept + std::size_t max_node_size() const { return traits::max_node_size(get_allocator()); } - std::size_t max_array_size() const noexcept + std::size_t max_array_size() const { return traits::max_array_size(get_allocator()); } - std::size_t max_alignment() const noexcept + std::size_t max_alignment() const { return traits::max_alignment(get_allocator()); } @@ -157,12 +157,12 @@ namespace foonathan { namespace memory /// @{ /// \brief Returns a reference to the allocator. 
- raw_allocator& get_allocator() noexcept + raw_allocator& get_allocator() FOONATHAN_NOEXCEPT { return *this; } - const raw_allocator& get_allocator() const noexcept + const raw_allocator& get_allocator() const FOONATHAN_NOEXCEPT { return *this; } @@ -170,12 +170,12 @@ namespace foonathan { namespace memory /// @{ /// \brief Returns a reference to the tracker. - tracker& get_tracker() noexcept + tracker& get_tracker() FOONATHAN_NOEXCEPT { return *this; } - const tracker& get_tracker() const noexcept + const tracker& get_tracker() const FOONATHAN_NOEXCEPT { return *this; } @@ -198,14 +198,14 @@ namespace foonathan { namespace memory /// \brief Creates a \ref tracked_allocator. /// \relates tracked_allocator template - auto make_tracked_allocator(Tracker t, RawAllocator alloc) - -> tracked_allocator + auto make_tracked_allocator(Tracker t, RawAllocator &&alloc) + -> tracked_allocator::type> { - return tracked_allocator(std::move(t), std::move(alloc)); + return tracked_allocator::type>{std::move(t), std::forward(alloc)}; } /// \brief Creates a deeply tracked \ref tracked_allocator. - /// \detail It also tracks allocator growth, that is, when allocators with implementation allocator (e.g. memory_stack), + /// \details It also tracks allocator growth, that is, when allocators with implementation allocator (e.g. \ref memory_stack), /// run out of memory blocks and need to allocate new, slow memory.
/// It is detected by wrapping the implementation allocator into an adapter and calling the appropriate tracker functions /// on allocation/deallocation of the implementation allocator.