// Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

#ifndef KJ_ARENA_H_
#define KJ_ARENA_H_

#if defined(__GNUC__) && !KJ_HEADER_WARNINGS
#pragma GCC system_header
#endif

#include "memory.h"
#include "array.h"
#include "string.h"

namespace kj {

class Arena {
  // A class which allows several objects to be allocated in contiguous chunks of memory, then
  // frees them all at once.
  //
  // Allocating from the same Arena in multiple threads concurrently is NOT safe, because making
  // it safe would require atomic operations that would slow down allocation even when
  // single-threaded. If you need to use arena allocation in a multithreaded context, consider
  // allocating thread-local arenas.

public:
  explicit Arena(size_t chunkSizeHint = 1024);
  // Create an Arena. `chunkSizeHint` hints at where to start when allocating chunks, but is only
  // a hint -- the Arena will, for example, allocate progressively larger chunks as time goes on,
  // in order to reduce overall allocation overhead.

  explicit Arena(ArrayPtr<byte> scratch);
  // Allocates from the given scratch space first, only resorting to the heap when it runs out.

  KJ_DISALLOW_COPY(Arena);
  ~Arena() noexcept(false);

  template <typename T, typename... Params>
  T& allocate(Params&&... params);
  template <typename T>
  ArrayPtr<T> allocateArray(size_t size);
  // Allocate an object or array of type T. If T has a non-trivial destructor, that destructor
  // will be run during the Arena's destructor. Such destructors are run in opposite order of
  // allocation. Note that these methods must maintain a list of destructors to call, which has
  // overhead, but this overhead only applies if T has a non-trivial destructor.
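  //
  // Illustrative usage sketch, where `Foo` is a hypothetical type with a one-argument
  // constructor:
  //
  //     kj::Arena arena;
  //     Foo& foo = arena.allocate<Foo>(123);
  //     kj::ArrayPtr<int> nums = arena.allocateArray<int>(16);
  //     // `foo` and `nums` remain valid until `arena` is destroyed, at which point Foo's
  //     // destructor (if non-trivial) is run.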

  template <typename T, typename... Params>
  Own<T> allocateOwn(Params&&... params);
  template <typename T>
  Array<T> allocateOwnArray(size_t size);
  template <typename T>
  ArrayBuilder<T> allocateOwnArrayBuilder(size_t capacity);
  // Allocate an object or array of type T. Destructors are executed when the returned Own<T>
  // or Array<T> goes out-of-scope, which must happen before the Arena is destroyed. This variant
  // is useful when you need to control when the destructor is called. This variant also avoids
  // the need for the Arena itself to keep track of destructors to call later, which may make it
  // slightly more efficient.
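  //
  // Illustrative usage sketch, again with a hypothetical type `Foo`:
  //
  //     kj::Arena arena;
  //     kj::Own<Foo> foo = arena.allocateOwn<Foo>(123);
  //     kj::Array<int> nums = arena.allocateOwnArray<int>(16);
  //     // Both `foo` and `nums` must be released before `arena` itself is destroyed.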

  template <typename T>
  inline T& copy(T&& value) { return allocate<Decay<T>>(kj::fwd<T>(value)); }
  // Allocate a copy of the given value in the arena. This is just a shortcut for calling the
  // type's copy (or move) constructor.

  StringPtr copyString(StringPtr content);
  // Make a copy of the given string inside the arena, and return a pointer to the copy.

private:
  struct ChunkHeader {
    ChunkHeader* next;
    byte* pos;  // first unallocated byte in this chunk
    byte* end;  // end of this chunk
  };
  struct ObjectHeader {
    void (*destructor)(void*);
    ObjectHeader* next;
  };

  size_t nextChunkSize;
  ChunkHeader* chunkList = nullptr;
  ObjectHeader* objectList = nullptr;

  ChunkHeader* currentChunk = nullptr;

  void cleanup();
  // Run all destructors, leaving the above pointers null. If a destructor throws, the State is
  // left in a consistent state, such that if cleanup() is called again, it will pick up where
  // it left off.

  void* allocateBytes(size_t amount, uint alignment, bool hasDisposer);
  // Allocate the given number of bytes. `hasDisposer` must be true if `setDisposer()` may be
  // called on this pointer later.

  void* allocateBytesInternal(size_t amount, uint alignment);
  // Try to allocate the given number of bytes without taking a lock. Fails if and only if there
  // is no space left in the current chunk.

  void setDestructor(void* ptr, void (*destructor)(void*));
  // Schedule the given destructor to be executed when the Arena is destroyed. `ptr` must be a
  // pointer previously returned by an `allocateBytes()` call for which `hasDisposer` was true.

  template <typename T>
  static void destroyArray(void* pointer) {
    size_t elementCount = *reinterpret_cast<size_t*>(pointer);
    constexpr size_t prefixSize = kj::max(alignof(T), sizeof(size_t));
    DestructorOnlyArrayDisposer::instance.disposeImpl(
        reinterpret_cast<byte*>(pointer) + prefixSize,
        sizeof(T), elementCount, elementCount, &destroyObject<T>);
  }

  template <typename T>
  static void destroyObject(void* pointer) {
    dtor(*reinterpret_cast<T*>(pointer));
  }
};

// =======================================================================================
// Inline implementation details

template <typename T, typename... Params>
T& Arena::allocate(Params&&... params) {
  T& result = *reinterpret_cast<T*>(allocateBytes(
      sizeof(T), alignof(T), !__has_trivial_destructor(T)));
  if (!__has_trivial_constructor(T) || sizeof...(Params) > 0) {
    ctor(result, kj::fwd<Params>(params)...);
  }
  if (!__has_trivial_destructor(T)) {
    setDestructor(&result, &destroyObject<T>);
  }
  return result;
}

template <typename T>
ArrayPtr<T> Arena::allocateArray(size_t size) {
  if (__has_trivial_destructor(T)) {
    ArrayPtr<T> result =
        arrayPtr(reinterpret_cast<T*>(allocateBytes(
            sizeof(T) * size, alignof(T), false)), size);
    if (!__has_trivial_constructor(T)) {
      for (size_t i = 0; i < size; i++) {
        ctor(result[i]);
      }
    }
    return result;
  } else {
    // Allocate with a 64-bit prefix in which we store the array size.
    constexpr size_t prefixSize = kj::max(alignof(T), sizeof(size_t));
    void* base = allocateBytes(sizeof(T) * size + prefixSize, alignof(T), true);
    size_t& tag = *reinterpret_cast<size_t*>(base);
    ArrayPtr<T> result =
        arrayPtr(reinterpret_cast<T*>(reinterpret_cast<byte*>(base) + prefixSize), size);
    setDestructor(base, &destroyArray<T>);

    if (__has_trivial_constructor(T)) {
      tag = size;
    } else {
      // In case of constructor exceptions, we need the tag to end up storing the number of
      // objects that were successfully constructed, so that they'll be properly destroyed.
      tag = 0;
      for (size_t i = 0; i < size; i++) {
        ctor(result[i]);
        tag = i + 1;
      }
    }
    return result;
  }
}

template <typename T, typename... Params>
Own<T> Arena::allocateOwn(Params&&... params) {
  T& result = *reinterpret_cast<T*>(allocateBytes(sizeof(T), alignof(T), false));
  if (!__has_trivial_constructor(T) || sizeof...(Params) > 0) {
    ctor(result, kj::fwd<Params>(params)...);
  }
  return Own<T>(&result, DestructorOnlyDisposer<T>::instance);
}

template <typename T>
Array<T> Arena::allocateOwnArray(size_t size) {
  ArrayBuilder<T> result = allocateOwnArrayBuilder<T>(size);
  for (size_t i = 0; i < size; i++) {
    result.add();
  }
  return result.finish();
}

template <typename T>
ArrayBuilder<T> Arena::allocateOwnArrayBuilder(size_t capacity) {
  return ArrayBuilder<T>(
      reinterpret_cast<T*>(allocateBytes(sizeof(T) * capacity, alignof(T), false)),
      capacity, DestructorOnlyArrayDisposer::instance);
}

} // namespace kj

#endif // KJ_ARENA_H_